diff --git a/packages/google-cloud-compute/docs/compute_v1/instance_group_manager_resize_requests.rst b/packages/google-cloud-compute/docs/compute_v1/instance_group_manager_resize_requests.rst new file mode 100644 index 000000000000..7a95a2fc2175 --- /dev/null +++ b/packages/google-cloud-compute/docs/compute_v1/instance_group_manager_resize_requests.rst @@ -0,0 +1,10 @@ +InstanceGroupManagerResizeRequests +---------------------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.instance_group_manager_resize_requests + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.instance_group_manager_resize_requests.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-compute/docs/compute_v1/instance_settings_service.rst b/packages/google-cloud-compute/docs/compute_v1/instance_settings_service.rst new file mode 100644 index 000000000000..b84b2e39f0bb --- /dev/null +++ b/packages/google-cloud-compute/docs/compute_v1/instance_settings_service.rst @@ -0,0 +1,6 @@ +InstanceSettingsService +----------------------------------------- + +.. 
automodule:: google.cloud.compute_v1.services.instance_settings_service + :members: + :inherited-members: diff --git a/packages/google-cloud-compute/docs/compute_v1/services_.rst b/packages/google-cloud-compute/docs/compute_v1/services_.rst index afd320f55e0f..1b6d820b7b4e 100644 --- a/packages/google-cloud-compute/docs/compute_v1/services_.rst +++ b/packages/google-cloud-compute/docs/compute_v1/services_.rst @@ -23,9 +23,11 @@ Services for Google Cloud Compute v1 API health_checks image_family_views images + instance_group_manager_resize_requests instance_group_managers instance_groups instances + instance_settings_service instance_templates instant_snapshots interconnect_attachments @@ -83,6 +85,8 @@ Services for Google Cloud Compute v1 API snapshot_settings_service ssl_certificates ssl_policies + storage_pools + storage_pool_types subnetworks target_grpc_proxies target_http_proxies diff --git a/packages/google-cloud-compute/docs/compute_v1/storage_pool_types.rst b/packages/google-cloud-compute/docs/compute_v1/storage_pool_types.rst new file mode 100644 index 000000000000..e3e4a1939e62 --- /dev/null +++ b/packages/google-cloud-compute/docs/compute_v1/storage_pool_types.rst @@ -0,0 +1,10 @@ +StoragePoolTypes +---------------------------------- + +.. automodule:: google.cloud.compute_v1.services.storage_pool_types + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.storage_pool_types.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-compute/docs/compute_v1/storage_pools.rst b/packages/google-cloud-compute/docs/compute_v1/storage_pools.rst new file mode 100644 index 000000000000..2b2d9f14fbff --- /dev/null +++ b/packages/google-cloud-compute/docs/compute_v1/storage_pools.rst @@ -0,0 +1,10 @@ +StoragePools +------------------------------ + +.. automodule:: google.cloud.compute_v1.services.storage_pools + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.storage_pools.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-compute/google/cloud/compute/__init__.py b/packages/google-cloud-compute/google/cloud/compute/__init__.py index 3a7c9355f69d..0a481c591240 100644 --- a/packages/google-cloud-compute/google/cloud/compute/__init__.py +++ b/packages/google-cloud-compute/google/cloud/compute/__init__.py @@ -62,10 +62,16 @@ ImageFamilyViewsClient, ) from google.cloud.compute_v1.services.images.client import ImagesClient +from google.cloud.compute_v1.services.instance_group_manager_resize_requests.client import ( + InstanceGroupManagerResizeRequestsClient, +) from google.cloud.compute_v1.services.instance_group_managers.client import ( InstanceGroupManagersClient, ) from google.cloud.compute_v1.services.instance_groups.client import InstanceGroupsClient +from google.cloud.compute_v1.services.instance_settings_service.client import ( + InstanceSettingsServiceClient, +) from google.cloud.compute_v1.services.instance_templates.client import ( InstanceTemplatesClient, ) @@ -200,6 +206,10 @@ SslCertificatesClient, ) from google.cloud.compute_v1.services.ssl_policies.client import SslPoliciesClient +from google.cloud.compute_v1.services.storage_pool_types.client import ( + StoragePoolTypesClient, +) +from google.cloud.compute_v1.services.storage_pools.client import StoragePoolsClient from google.cloud.compute_v1.services.subnetworks.client import SubnetworksClient from google.cloud.compute_v1.services.target_grpc_proxies.client import ( TargetGrpcProxiesClient, @@ -294,6 +304,8 @@ AggregatedListServiceAttachmentsRequest, AggregatedListSslCertificatesRequest, AggregatedListSslPoliciesRequest, + AggregatedListStoragePoolsRequest, + AggregatedListStoragePoolTypesRequest, AggregatedListSubnetworksRequest, AggregatedListTargetHttpProxiesRequest, AggregatedListTargetHttpsProxiesRequest, @@ -378,6 +390,7 @@ BulkInsertRegionInstanceRequest, CacheInvalidationRule, 
CacheKeyPolicy, + CancelInstanceGroupManagerResizeRequestRequest, CircuitBreakers, CloneRulesFirewallPolicyRequest, CloneRulesNetworkFirewallPolicyRequest, @@ -420,6 +433,7 @@ DeleteHealthCheckRequest, DeleteImageRequest, DeleteInstanceGroupManagerRequest, + DeleteInstanceGroupManagerResizeRequestRequest, DeleteInstanceGroupRequest, DeleteInstanceRequest, DeleteInstancesInstanceGroupManagerRequest, @@ -474,6 +488,7 @@ DeleteSnapshotRequest, DeleteSslCertificateRequest, DeleteSslPolicyRequest, + DeleteStoragePoolRequest, DeleteSubnetworkRequest, DeleteTargetGrpcProxyRequest, DeleteTargetHttpProxyRequest, @@ -605,12 +620,15 @@ GetIamPolicyResourcePolicyRequest, GetIamPolicyServiceAttachmentRequest, GetIamPolicySnapshotRequest, + GetIamPolicyStoragePoolRequest, GetIamPolicySubnetworkRequest, GetImageFamilyViewRequest, GetImageRequest, GetInstanceGroupManagerRequest, + GetInstanceGroupManagerResizeRequestRequest, GetInstanceGroupRequest, GetInstanceRequest, + GetInstanceSettingRequest, GetInstanceTemplateRequest, GetInstantSnapshotRequest, GetInterconnectAttachmentRequest, @@ -679,6 +697,8 @@ GetSslCertificateRequest, GetSslPolicyRequest, GetStatusVpnGatewayRequest, + GetStoragePoolRequest, + GetStoragePoolTypeRequest, GetSubnetworkRequest, GetTargetGrpcProxyRequest, GetTargetHttpProxyRequest, @@ -755,6 +775,7 @@ InsertHealthCheckRequest, InsertImageRequest, InsertInstanceGroupManagerRequest, + InsertInstanceGroupManagerResizeRequestRequest, InsertInstanceGroupRequest, InsertInstanceRequest, InsertInstanceTemplateRequest, @@ -801,6 +822,7 @@ InsertSnapshotRequest, InsertSslCertificateRequest, InsertSslPolicyRequest, + InsertStoragePoolRequest, InsertSubnetworkRequest, InsertTargetGrpcProxyRequest, InsertTargetHttpProxyRequest, @@ -827,6 +849,10 @@ InstanceGroupManagerAutoHealingPolicy, InstanceGroupManagerInstanceLifecyclePolicy, InstanceGroupManagerList, + InstanceGroupManagerResizeRequest, + InstanceGroupManagerResizeRequestsListResponse, + 
InstanceGroupManagerResizeRequestStatus, + InstanceGroupManagerResizeRequestStatusLastAttempt, InstanceGroupManagersAbandonInstancesRequest, InstanceGroupManagersApplyUpdatesRequest, InstanceGroupManagersCreateInstancesRequest, @@ -866,6 +892,8 @@ InstanceReference, InstancesAddResourcePoliciesRequest, InstancesBulkInsertOperationMetadata, + InstanceSettings, + InstanceSettingsMetadata, InstancesGetEffectiveFirewallsResponse, InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy, InstancesRemoveResourcePoliciesRequest, @@ -938,6 +966,7 @@ ListBackendBucketsRequest, ListBackendServicesRequest, ListDisksRequest, + ListDisksStoragePoolsRequest, ListDiskTypesRequest, ListErrorsInstanceGroupManagersRequest, ListErrorsRegionInstanceGroupManagersRequest, @@ -953,6 +982,7 @@ ListGlobalPublicDelegatedPrefixesRequest, ListHealthChecksRequest, ListImagesRequest, + ListInstanceGroupManagerResizeRequestsRequest, ListInstanceGroupManagersRequest, ListInstanceGroupsRequest, ListInstancesInstanceGroupsRequest, @@ -1021,6 +1051,8 @@ ListSnapshotsRequest, ListSslCertificatesRequest, ListSslPoliciesRequest, + ListStoragePoolsRequest, + ListStoragePoolTypesRequest, ListSubnetworksRequest, ListTargetGrpcProxiesRequest, ListTargetHttpProxiesRequest, @@ -1113,6 +1145,7 @@ NodeGroupsAddNodesRequest, NodeGroupsDeleteNodesRequest, NodeGroupsListNodes, + NodeGroupsPerformMaintenanceRequest, NodeGroupsScopedList, NodeGroupsSetNodeTemplateRequest, NodeGroupsSimulateMaintenanceEventRequest, @@ -1155,6 +1188,7 @@ PatchHealthCheckRequest, PatchImageRequest, PatchInstanceGroupManagerRequest, + PatchInstanceSettingRequest, PatchInterconnectAttachmentRequest, PatchInterconnectRequest, PatchNetworkAttachmentRequest, @@ -1196,6 +1230,7 @@ PathMatcher, PathRule, PerformMaintenanceInstanceRequest, + PerformMaintenanceNodeGroupRequest, PerInstanceConfig, Policy, PreconfiguredWafSet, @@ -1221,6 +1256,7 @@ PublicDelegatedPrefixPublicDelegatedSubPrefix, Quota, QuotaExceededInfo, + 
QuotaStatusWarning, RawDisk, RecreateInstancesInstanceGroupManagerRequest, RecreateInstancesRegionInstanceGroupManagerRequest, @@ -1422,6 +1458,7 @@ SetIamPolicyResourcePolicyRequest, SetIamPolicyServiceAttachmentRequest, SetIamPolicySnapshotRequest, + SetIamPolicyStoragePoolRequest, SetIamPolicySubnetworkRequest, SetInstanceTemplateInstanceGroupManagerRequest, SetInstanceTemplateRegionInstanceGroupManagerRequest, @@ -1522,6 +1559,17 @@ StopGroupAsyncReplicationDiskRequest, StopGroupAsyncReplicationRegionDiskRequest, StopInstanceRequest, + StoragePool, + StoragePoolAggregatedList, + StoragePoolDisk, + StoragePoolList, + StoragePoolListDisks, + StoragePoolResourceStatus, + StoragePoolsScopedList, + StoragePoolType, + StoragePoolTypeAggregatedList, + StoragePoolTypeList, + StoragePoolTypesScopedList, Subnetwork, SubnetworkAggregatedList, SubnetworkList, @@ -1605,6 +1653,7 @@ TestIamPermissionsResourcePolicyRequest, TestIamPermissionsServiceAttachmentRequest, TestIamPermissionsSnapshotRequest, + TestIamPermissionsStoragePoolRequest, TestIamPermissionsSubnetworkRequest, TestIamPermissionsVpnGatewayRequest, TestPermissionsRequest, @@ -1633,6 +1682,7 @@ UpdateReservationRequest, UpdateRouterRequest, UpdateShieldedInstanceConfigInstanceRequest, + UpdateStoragePoolRequest, UpdateUrlMapRequest, UrlMap, UrlMapList, @@ -1708,9 +1758,11 @@ "HealthChecksClient", "ImageFamilyViewsClient", "ImagesClient", + "InstanceGroupManagerResizeRequestsClient", "InstanceGroupManagersClient", "InstanceGroupsClient", "InstancesClient", + "InstanceSettingsServiceClient", "InstanceTemplatesClient", "InstantSnapshotsClient", "InterconnectAttachmentsClient", @@ -1768,6 +1820,8 @@ "SnapshotSettingsServiceClient", "SslCertificatesClient", "SslPoliciesClient", + "StoragePoolsClient", + "StoragePoolTypesClient", "SubnetworksClient", "TargetGrpcProxiesClient", "TargetHttpProxiesClient", @@ -1847,6 +1901,8 @@ "AggregatedListServiceAttachmentsRequest", "AggregatedListSslCertificatesRequest", 
"AggregatedListSslPoliciesRequest", + "AggregatedListStoragePoolsRequest", + "AggregatedListStoragePoolTypesRequest", "AggregatedListSubnetworksRequest", "AggregatedListTargetHttpProxiesRequest", "AggregatedListTargetHttpsProxiesRequest", @@ -1931,6 +1987,7 @@ "BulkInsertRegionInstanceRequest", "CacheInvalidationRule", "CacheKeyPolicy", + "CancelInstanceGroupManagerResizeRequestRequest", "CircuitBreakers", "CloneRulesFirewallPolicyRequest", "CloneRulesNetworkFirewallPolicyRequest", @@ -1973,6 +2030,7 @@ "DeleteHealthCheckRequest", "DeleteImageRequest", "DeleteInstanceGroupManagerRequest", + "DeleteInstanceGroupManagerResizeRequestRequest", "DeleteInstanceGroupRequest", "DeleteInstanceRequest", "DeleteInstancesInstanceGroupManagerRequest", @@ -2027,6 +2085,7 @@ "DeleteSnapshotRequest", "DeleteSslCertificateRequest", "DeleteSslPolicyRequest", + "DeleteStoragePoolRequest", "DeleteSubnetworkRequest", "DeleteTargetGrpcProxyRequest", "DeleteTargetHttpProxyRequest", @@ -2158,12 +2217,15 @@ "GetIamPolicyResourcePolicyRequest", "GetIamPolicyServiceAttachmentRequest", "GetIamPolicySnapshotRequest", + "GetIamPolicyStoragePoolRequest", "GetIamPolicySubnetworkRequest", "GetImageFamilyViewRequest", "GetImageRequest", "GetInstanceGroupManagerRequest", + "GetInstanceGroupManagerResizeRequestRequest", "GetInstanceGroupRequest", "GetInstanceRequest", + "GetInstanceSettingRequest", "GetInstanceTemplateRequest", "GetInstantSnapshotRequest", "GetInterconnectAttachmentRequest", @@ -2232,6 +2294,8 @@ "GetSslCertificateRequest", "GetSslPolicyRequest", "GetStatusVpnGatewayRequest", + "GetStoragePoolRequest", + "GetStoragePoolTypeRequest", "GetSubnetworkRequest", "GetTargetGrpcProxyRequest", "GetTargetHttpProxyRequest", @@ -2308,6 +2372,7 @@ "InsertHealthCheckRequest", "InsertImageRequest", "InsertInstanceGroupManagerRequest", + "InsertInstanceGroupManagerResizeRequestRequest", "InsertInstanceGroupRequest", "InsertInstanceRequest", "InsertInstanceTemplateRequest", @@ -2354,6 +2419,7 @@ 
"InsertSnapshotRequest", "InsertSslCertificateRequest", "InsertSslPolicyRequest", + "InsertStoragePoolRequest", "InsertSubnetworkRequest", "InsertTargetGrpcProxyRequest", "InsertTargetHttpProxyRequest", @@ -2380,6 +2446,10 @@ "InstanceGroupManagerAutoHealingPolicy", "InstanceGroupManagerInstanceLifecyclePolicy", "InstanceGroupManagerList", + "InstanceGroupManagerResizeRequest", + "InstanceGroupManagerResizeRequestsListResponse", + "InstanceGroupManagerResizeRequestStatus", + "InstanceGroupManagerResizeRequestStatusLastAttempt", "InstanceGroupManagersAbandonInstancesRequest", "InstanceGroupManagersApplyUpdatesRequest", "InstanceGroupManagersCreateInstancesRequest", @@ -2419,6 +2489,8 @@ "InstanceReference", "InstancesAddResourcePoliciesRequest", "InstancesBulkInsertOperationMetadata", + "InstanceSettings", + "InstanceSettingsMetadata", "InstancesGetEffectiveFirewallsResponse", "InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy", "InstancesRemoveResourcePoliciesRequest", @@ -2491,6 +2563,7 @@ "ListBackendBucketsRequest", "ListBackendServicesRequest", "ListDisksRequest", + "ListDisksStoragePoolsRequest", "ListDiskTypesRequest", "ListErrorsInstanceGroupManagersRequest", "ListErrorsRegionInstanceGroupManagersRequest", @@ -2506,6 +2579,7 @@ "ListGlobalPublicDelegatedPrefixesRequest", "ListHealthChecksRequest", "ListImagesRequest", + "ListInstanceGroupManagerResizeRequestsRequest", "ListInstanceGroupManagersRequest", "ListInstanceGroupsRequest", "ListInstancesInstanceGroupsRequest", @@ -2574,6 +2648,8 @@ "ListSnapshotsRequest", "ListSslCertificatesRequest", "ListSslPoliciesRequest", + "ListStoragePoolsRequest", + "ListStoragePoolTypesRequest", "ListSubnetworksRequest", "ListTargetGrpcProxiesRequest", "ListTargetHttpProxiesRequest", @@ -2666,6 +2742,7 @@ "NodeGroupsAddNodesRequest", "NodeGroupsDeleteNodesRequest", "NodeGroupsListNodes", + "NodeGroupsPerformMaintenanceRequest", "NodeGroupsScopedList", "NodeGroupsSetNodeTemplateRequest", 
"NodeGroupsSimulateMaintenanceEventRequest", @@ -2708,6 +2785,7 @@ "PatchHealthCheckRequest", "PatchImageRequest", "PatchInstanceGroupManagerRequest", + "PatchInstanceSettingRequest", "PatchInterconnectAttachmentRequest", "PatchInterconnectRequest", "PatchNetworkAttachmentRequest", @@ -2749,6 +2827,7 @@ "PathMatcher", "PathRule", "PerformMaintenanceInstanceRequest", + "PerformMaintenanceNodeGroupRequest", "PerInstanceConfig", "Policy", "PreconfiguredWafSet", @@ -2774,6 +2853,7 @@ "PublicDelegatedPrefixPublicDelegatedSubPrefix", "Quota", "QuotaExceededInfo", + "QuotaStatusWarning", "RawDisk", "RecreateInstancesInstanceGroupManagerRequest", "RecreateInstancesRegionInstanceGroupManagerRequest", @@ -2975,6 +3055,7 @@ "SetIamPolicyResourcePolicyRequest", "SetIamPolicyServiceAttachmentRequest", "SetIamPolicySnapshotRequest", + "SetIamPolicyStoragePoolRequest", "SetIamPolicySubnetworkRequest", "SetInstanceTemplateInstanceGroupManagerRequest", "SetInstanceTemplateRegionInstanceGroupManagerRequest", @@ -3075,6 +3156,17 @@ "StopGroupAsyncReplicationDiskRequest", "StopGroupAsyncReplicationRegionDiskRequest", "StopInstanceRequest", + "StoragePool", + "StoragePoolAggregatedList", + "StoragePoolDisk", + "StoragePoolList", + "StoragePoolListDisks", + "StoragePoolResourceStatus", + "StoragePoolsScopedList", + "StoragePoolType", + "StoragePoolTypeAggregatedList", + "StoragePoolTypeList", + "StoragePoolTypesScopedList", "Subnetwork", "SubnetworkAggregatedList", "SubnetworkList", @@ -3158,6 +3250,7 @@ "TestIamPermissionsResourcePolicyRequest", "TestIamPermissionsServiceAttachmentRequest", "TestIamPermissionsSnapshotRequest", + "TestIamPermissionsStoragePoolRequest", "TestIamPermissionsSubnetworkRequest", "TestIamPermissionsVpnGatewayRequest", "TestPermissionsRequest", @@ -3186,6 +3279,7 @@ "UpdateReservationRequest", "UpdateRouterRequest", "UpdateShieldedInstanceConfigInstanceRequest", + "UpdateStoragePoolRequest", "UpdateUrlMapRequest", "UrlMap", "UrlMapList", diff --git 
a/packages/google-cloud-compute/google/cloud/compute/gapic_version.py b/packages/google-cloud-compute/google/cloud/compute/gapic_version.py index 8099b154e9b6..558c8aab67c5 100644 --- a/packages/google-cloud-compute/google/cloud/compute/gapic_version.py +++ b/packages/google-cloud-compute/google/cloud/compute/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.18.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py index b7a0488f5243..c1d442a9ad77 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py @@ -40,8 +40,12 @@ from .services.health_checks import HealthChecksClient from .services.image_family_views import ImageFamilyViewsClient from .services.images import ImagesClient +from .services.instance_group_manager_resize_requests import ( + InstanceGroupManagerResizeRequestsClient, +) from .services.instance_group_managers import InstanceGroupManagersClient from .services.instance_groups import InstanceGroupsClient +from .services.instance_settings_service import InstanceSettingsServiceClient from .services.instance_templates import InstanceTemplatesClient from .services.instances import InstancesClient from .services.instant_snapshots import InstantSnapshotsClient @@ -102,6 +106,8 @@ from .services.snapshots import SnapshotsClient from .services.ssl_certificates import SslCertificatesClient from .services.ssl_policies import SslPoliciesClient +from .services.storage_pool_types import StoragePoolTypesClient +from .services.storage_pools import StoragePoolsClient from .services.subnetworks import SubnetworksClient from .services.target_grpc_proxies import TargetGrpcProxiesClient from 
.services.target_http_proxies import TargetHttpProxiesClient @@ -182,6 +188,8 @@ AggregatedListServiceAttachmentsRequest, AggregatedListSslCertificatesRequest, AggregatedListSslPoliciesRequest, + AggregatedListStoragePoolsRequest, + AggregatedListStoragePoolTypesRequest, AggregatedListSubnetworksRequest, AggregatedListTargetHttpProxiesRequest, AggregatedListTargetHttpsProxiesRequest, @@ -266,6 +274,7 @@ BulkInsertRegionInstanceRequest, CacheInvalidationRule, CacheKeyPolicy, + CancelInstanceGroupManagerResizeRequestRequest, CircuitBreakers, CloneRulesFirewallPolicyRequest, CloneRulesNetworkFirewallPolicyRequest, @@ -308,6 +317,7 @@ DeleteHealthCheckRequest, DeleteImageRequest, DeleteInstanceGroupManagerRequest, + DeleteInstanceGroupManagerResizeRequestRequest, DeleteInstanceGroupRequest, DeleteInstanceRequest, DeleteInstancesInstanceGroupManagerRequest, @@ -362,6 +372,7 @@ DeleteSnapshotRequest, DeleteSslCertificateRequest, DeleteSslPolicyRequest, + DeleteStoragePoolRequest, DeleteSubnetworkRequest, DeleteTargetGrpcProxyRequest, DeleteTargetHttpProxyRequest, @@ -493,12 +504,15 @@ GetIamPolicyResourcePolicyRequest, GetIamPolicyServiceAttachmentRequest, GetIamPolicySnapshotRequest, + GetIamPolicyStoragePoolRequest, GetIamPolicySubnetworkRequest, GetImageFamilyViewRequest, GetImageRequest, GetInstanceGroupManagerRequest, + GetInstanceGroupManagerResizeRequestRequest, GetInstanceGroupRequest, GetInstanceRequest, + GetInstanceSettingRequest, GetInstanceTemplateRequest, GetInstantSnapshotRequest, GetInterconnectAttachmentRequest, @@ -567,6 +581,8 @@ GetSslCertificateRequest, GetSslPolicyRequest, GetStatusVpnGatewayRequest, + GetStoragePoolRequest, + GetStoragePoolTypeRequest, GetSubnetworkRequest, GetTargetGrpcProxyRequest, GetTargetHttpProxyRequest, @@ -643,6 +659,7 @@ InsertHealthCheckRequest, InsertImageRequest, InsertInstanceGroupManagerRequest, + InsertInstanceGroupManagerResizeRequestRequest, InsertInstanceGroupRequest, InsertInstanceRequest, 
InsertInstanceTemplateRequest, @@ -689,6 +706,7 @@ InsertSnapshotRequest, InsertSslCertificateRequest, InsertSslPolicyRequest, + InsertStoragePoolRequest, InsertSubnetworkRequest, InsertTargetGrpcProxyRequest, InsertTargetHttpProxyRequest, @@ -715,6 +733,10 @@ InstanceGroupManagerAutoHealingPolicy, InstanceGroupManagerInstanceLifecyclePolicy, InstanceGroupManagerList, + InstanceGroupManagerResizeRequest, + InstanceGroupManagerResizeRequestsListResponse, + InstanceGroupManagerResizeRequestStatus, + InstanceGroupManagerResizeRequestStatusLastAttempt, InstanceGroupManagersAbandonInstancesRequest, InstanceGroupManagersApplyUpdatesRequest, InstanceGroupManagersCreateInstancesRequest, @@ -754,6 +776,8 @@ InstanceReference, InstancesAddResourcePoliciesRequest, InstancesBulkInsertOperationMetadata, + InstanceSettings, + InstanceSettingsMetadata, InstancesGetEffectiveFirewallsResponse, InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy, InstancesRemoveResourcePoliciesRequest, @@ -826,6 +850,7 @@ ListBackendBucketsRequest, ListBackendServicesRequest, ListDisksRequest, + ListDisksStoragePoolsRequest, ListDiskTypesRequest, ListErrorsInstanceGroupManagersRequest, ListErrorsRegionInstanceGroupManagersRequest, @@ -841,6 +866,7 @@ ListGlobalPublicDelegatedPrefixesRequest, ListHealthChecksRequest, ListImagesRequest, + ListInstanceGroupManagerResizeRequestsRequest, ListInstanceGroupManagersRequest, ListInstanceGroupsRequest, ListInstancesInstanceGroupsRequest, @@ -909,6 +935,8 @@ ListSnapshotsRequest, ListSslCertificatesRequest, ListSslPoliciesRequest, + ListStoragePoolsRequest, + ListStoragePoolTypesRequest, ListSubnetworksRequest, ListTargetGrpcProxiesRequest, ListTargetHttpProxiesRequest, @@ -1001,6 +1029,7 @@ NodeGroupsAddNodesRequest, NodeGroupsDeleteNodesRequest, NodeGroupsListNodes, + NodeGroupsPerformMaintenanceRequest, NodeGroupsScopedList, NodeGroupsSetNodeTemplateRequest, NodeGroupsSimulateMaintenanceEventRequest, @@ -1043,6 +1072,7 @@ PatchHealthCheckRequest, 
PatchImageRequest, PatchInstanceGroupManagerRequest, + PatchInstanceSettingRequest, PatchInterconnectAttachmentRequest, PatchInterconnectRequest, PatchNetworkAttachmentRequest, @@ -1084,6 +1114,7 @@ PathMatcher, PathRule, PerformMaintenanceInstanceRequest, + PerformMaintenanceNodeGroupRequest, PerInstanceConfig, Policy, PreconfiguredWafSet, @@ -1109,6 +1140,7 @@ PublicDelegatedPrefixPublicDelegatedSubPrefix, Quota, QuotaExceededInfo, + QuotaStatusWarning, RawDisk, RecreateInstancesInstanceGroupManagerRequest, RecreateInstancesRegionInstanceGroupManagerRequest, @@ -1310,6 +1342,7 @@ SetIamPolicyResourcePolicyRequest, SetIamPolicyServiceAttachmentRequest, SetIamPolicySnapshotRequest, + SetIamPolicyStoragePoolRequest, SetIamPolicySubnetworkRequest, SetInstanceTemplateInstanceGroupManagerRequest, SetInstanceTemplateRegionInstanceGroupManagerRequest, @@ -1410,6 +1443,17 @@ StopGroupAsyncReplicationDiskRequest, StopGroupAsyncReplicationRegionDiskRequest, StopInstanceRequest, + StoragePool, + StoragePoolAggregatedList, + StoragePoolDisk, + StoragePoolList, + StoragePoolListDisks, + StoragePoolResourceStatus, + StoragePoolsScopedList, + StoragePoolType, + StoragePoolTypeAggregatedList, + StoragePoolTypeList, + StoragePoolTypesScopedList, Subnetwork, SubnetworkAggregatedList, SubnetworkList, @@ -1493,6 +1537,7 @@ TestIamPermissionsResourcePolicyRequest, TestIamPermissionsServiceAttachmentRequest, TestIamPermissionsSnapshotRequest, + TestIamPermissionsStoragePoolRequest, TestIamPermissionsSubnetworkRequest, TestIamPermissionsVpnGatewayRequest, TestPermissionsRequest, @@ -1521,6 +1566,7 @@ UpdateReservationRequest, UpdateRouterRequest, UpdateShieldedInstanceConfigInstanceRequest, + UpdateStoragePoolRequest, UpdateUrlMapRequest, UrlMap, UrlMapList, @@ -1644,6 +1690,8 @@ "AggregatedListServiceAttachmentsRequest", "AggregatedListSslCertificatesRequest", "AggregatedListSslPoliciesRequest", + "AggregatedListStoragePoolTypesRequest", + "AggregatedListStoragePoolsRequest", 
"AggregatedListSubnetworksRequest", "AggregatedListTargetHttpProxiesRequest", "AggregatedListTargetHttpsProxiesRequest", @@ -1730,6 +1778,7 @@ "BulkInsertRegionInstanceRequest", "CacheInvalidationRule", "CacheKeyPolicy", + "CancelInstanceGroupManagerResizeRequestRequest", "CircuitBreakers", "CloneRulesFirewallPolicyRequest", "CloneRulesNetworkFirewallPolicyRequest", @@ -1772,6 +1821,7 @@ "DeleteHealthCheckRequest", "DeleteImageRequest", "DeleteInstanceGroupManagerRequest", + "DeleteInstanceGroupManagerResizeRequestRequest", "DeleteInstanceGroupRequest", "DeleteInstanceRequest", "DeleteInstanceTemplateRequest", @@ -1826,6 +1876,7 @@ "DeleteSnapshotRequest", "DeleteSslCertificateRequest", "DeleteSslPolicyRequest", + "DeleteStoragePoolRequest", "DeleteSubnetworkRequest", "DeleteTargetGrpcProxyRequest", "DeleteTargetHttpProxyRequest", @@ -1964,12 +2015,15 @@ "GetIamPolicyResourcePolicyRequest", "GetIamPolicyServiceAttachmentRequest", "GetIamPolicySnapshotRequest", + "GetIamPolicyStoragePoolRequest", "GetIamPolicySubnetworkRequest", "GetImageFamilyViewRequest", "GetImageRequest", "GetInstanceGroupManagerRequest", + "GetInstanceGroupManagerResizeRequestRequest", "GetInstanceGroupRequest", "GetInstanceRequest", + "GetInstanceSettingRequest", "GetInstanceTemplateRequest", "GetInstantSnapshotRequest", "GetInterconnectAttachmentRequest", @@ -2038,6 +2092,8 @@ "GetSslCertificateRequest", "GetSslPolicyRequest", "GetStatusVpnGatewayRequest", + "GetStoragePoolRequest", + "GetStoragePoolTypeRequest", "GetSubnetworkRequest", "GetTargetGrpcProxyRequest", "GetTargetHttpProxyRequest", @@ -2122,6 +2178,7 @@ "InsertHealthCheckRequest", "InsertImageRequest", "InsertInstanceGroupManagerRequest", + "InsertInstanceGroupManagerResizeRequestRequest", "InsertInstanceGroupRequest", "InsertInstanceRequest", "InsertInstanceTemplateRequest", @@ -2168,6 +2225,7 @@ "InsertSnapshotRequest", "InsertSslCertificateRequest", "InsertSslPolicyRequest", + "InsertStoragePoolRequest", 
"InsertSubnetworkRequest", "InsertTargetGrpcProxyRequest", "InsertTargetHttpProxyRequest", @@ -2194,6 +2252,11 @@ "InstanceGroupManagerAutoHealingPolicy", "InstanceGroupManagerInstanceLifecyclePolicy", "InstanceGroupManagerList", + "InstanceGroupManagerResizeRequest", + "InstanceGroupManagerResizeRequestStatus", + "InstanceGroupManagerResizeRequestStatusLastAttempt", + "InstanceGroupManagerResizeRequestsClient", + "InstanceGroupManagerResizeRequestsListResponse", "InstanceGroupManagerStatus", "InstanceGroupManagerStatusAllInstancesConfig", "InstanceGroupManagerStatusStateful", @@ -2233,6 +2296,9 @@ "InstanceProperties", "InstancePropertiesPatch", "InstanceReference", + "InstanceSettings", + "InstanceSettingsMetadata", + "InstanceSettingsServiceClient", "InstanceTemplate", "InstanceTemplateAggregatedList", "InstanceTemplateList", @@ -2317,6 +2383,7 @@ "ListBackendServicesRequest", "ListDiskTypesRequest", "ListDisksRequest", + "ListDisksStoragePoolsRequest", "ListErrorsInstanceGroupManagersRequest", "ListErrorsRegionInstanceGroupManagersRequest", "ListExternalVpnGatewaysRequest", @@ -2331,6 +2398,7 @@ "ListGlobalPublicDelegatedPrefixesRequest", "ListHealthChecksRequest", "ListImagesRequest", + "ListInstanceGroupManagerResizeRequestsRequest", "ListInstanceGroupManagersRequest", "ListInstanceGroupsRequest", "ListInstanceTemplatesRequest", @@ -2399,6 +2467,8 @@ "ListSnapshotsRequest", "ListSslCertificatesRequest", "ListSslPoliciesRequest", + "ListStoragePoolTypesRequest", + "ListStoragePoolsRequest", "ListSubnetworksRequest", "ListTargetGrpcProxiesRequest", "ListTargetHttpProxiesRequest", @@ -2499,6 +2569,7 @@ "NodeGroupsClient", "NodeGroupsDeleteNodesRequest", "NodeGroupsListNodes", + "NodeGroupsPerformMaintenanceRequest", "NodeGroupsScopedList", "NodeGroupsSetNodeTemplateRequest", "NodeGroupsSimulateMaintenanceEventRequest", @@ -2544,6 +2615,7 @@ "PatchHealthCheckRequest", "PatchImageRequest", "PatchInstanceGroupManagerRequest", + "PatchInstanceSettingRequest", 
"PatchInterconnectAttachmentRequest", "PatchInterconnectRequest", "PatchNetworkAttachmentRequest", @@ -2586,6 +2658,7 @@ "PathRule", "PerInstanceConfig", "PerformMaintenanceInstanceRequest", + "PerformMaintenanceNodeGroupRequest", "Policy", "PreconfiguredWafSet", "PreservedState", @@ -2613,6 +2686,7 @@ "PublicDelegatedPrefixesScopedList", "Quota", "QuotaExceededInfo", + "QuotaStatusWarning", "RawDisk", "RecreateInstancesInstanceGroupManagerRequest", "RecreateInstancesRegionInstanceGroupManagerRequest", @@ -2846,6 +2920,7 @@ "SetIamPolicyResourcePolicyRequest", "SetIamPolicyServiceAttachmentRequest", "SetIamPolicySnapshotRequest", + "SetIamPolicyStoragePoolRequest", "SetIamPolicySubnetworkRequest", "SetInstanceTemplateInstanceGroupManagerRequest", "SetInstanceTemplateRegionInstanceGroupManagerRequest", @@ -2949,6 +3024,19 @@ "StopGroupAsyncReplicationDiskRequest", "StopGroupAsyncReplicationRegionDiskRequest", "StopInstanceRequest", + "StoragePool", + "StoragePoolAggregatedList", + "StoragePoolDisk", + "StoragePoolList", + "StoragePoolListDisks", + "StoragePoolResourceStatus", + "StoragePoolType", + "StoragePoolTypeAggregatedList", + "StoragePoolTypeList", + "StoragePoolTypesClient", + "StoragePoolTypesScopedList", + "StoragePoolsClient", + "StoragePoolsScopedList", "Subnetwork", "SubnetworkAggregatedList", "SubnetworkList", @@ -3041,6 +3129,7 @@ "TestIamPermissionsResourcePolicyRequest", "TestIamPermissionsServiceAttachmentRequest", "TestIamPermissionsSnapshotRequest", + "TestIamPermissionsStoragePoolRequest", "TestIamPermissionsSubnetworkRequest", "TestIamPermissionsVpnGatewayRequest", "TestPermissionsRequest", @@ -3069,6 +3158,7 @@ "UpdateReservationRequest", "UpdateRouterRequest", "UpdateShieldedInstanceConfigInstanceRequest", + "UpdateStoragePoolRequest", "UpdateUrlMapRequest", "UrlMap", "UrlMapList", diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/gapic_metadata.json b/packages/google-cloud-compute/google/cloud/compute_v1/gapic_metadata.json 
index 494060e6d6df..32af819c89f7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/gapic_metadata.json +++ b/packages/google-cloud-compute/google/cloud/compute_v1/gapic_metadata.json @@ -965,6 +965,40 @@ } } }, + "InstanceGroupManagerResizeRequests": { + "clients": { + "rest": { + "libraryClient": "InstanceGroupManagerResizeRequestsClient", + "rpcs": { + "Cancel": { + "methods": [ + "cancel" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + } + } + } + } + }, "InstanceGroupManagers": { "clients": { "rest": { @@ -1128,6 +1162,25 @@ } } }, + "InstanceSettingsService": { + "clients": { + "rest": { + "libraryClient": "InstanceSettingsServiceClient", + "rpcs": { + "Get": { + "methods": [ + "get" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + } + } + } + } + }, "InstanceTemplates": { "clients": { "rest": { @@ -2092,6 +2145,11 @@ "patch" ] }, + "PerformMaintenance": { + "methods": [ + "perform_maintenance" + ] + }, "SetIamPolicy": { "methods": [ "set_iam_policy" @@ -3935,6 +3993,89 @@ } } }, + "StoragePoolTypes": { + "clients": { + "rest": { + "libraryClient": "StoragePoolTypesClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "List": { + "methods": [ + "list" + ] + } + } + } + } + }, + "StoragePools": { + "clients": { + "rest": { + "libraryClient": "StoragePoolsClient", + "rpcs": { + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "ListDisks": { + "methods": [ + "list_disks" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + 
"TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "Update": { + "methods": [ + "update" + ] + } + } + } + } + }, "Subnetworks": { "clients": { "rest": { diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py b/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py index 8099b154e9b6..558c8aab67c5 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.18.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/client.py index 0e0486382d2d..237ac0186de9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/accelerator_types/client.py @@ -17,6 +17,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AcceleratorTypesTransport]] = None, + transport: Optional[ + Union[ + str, AcceleratorTypesTransport, Callable[..., AcceleratorTypesTransport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +523,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. 
- transport (Union[str, AcceleratorTypesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,AcceleratorTypesTransport,Callable[..., AcceleratorTypesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the AcceleratorTypesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -632,8 +639,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[AcceleratorTypesTransport], + Callable[..., AcceleratorTypesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., AcceleratorTypesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -711,8 +726,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -720,10 +735,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListAcceleratorTypesRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListAcceleratorTypesRequest): request = compute.AggregatedListAcceleratorTypesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -848,8 +861,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, accelerator_type]) if request is not None and has_flattened_params: raise ValueError( @@ -857,10 +870,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetAcceleratorTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetAcceleratorTypeRequest): request = compute.GetAcceleratorTypeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -976,8 +987,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, zone]) if request is not None and has_flattened_params: raise ValueError( @@ -985,10 +996,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListAcceleratorTypesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListAcceleratorTypesRequest): request = compute.ListAcceleratorTypesRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/client.py index b29458cdaac7..6e923f965dfc 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/addresses/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AddressesTransport]] = None, + transport: Optional[ + Union[str, AddressesTransport, Callable[..., AddressesTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, AddressesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. 
+ transport (Optional[Union[str,AddressesTransport,Callable[..., AddressesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the AddressesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -627,8 +632,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[AddressesTransport], Callable[..., AddressesTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., AddressesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -704,8 +716,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -713,10 +725,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListAddressesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListAddressesRequest): request = compute.AggregatedListAddressesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -833,8 +843,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, address]) if request is not None and has_flattened_params: raise ValueError( @@ -842,10 +852,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteAddressRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteAddressRequest): request = compute.DeleteAddressRequest(request) # If we have keyword arguments corresponding to fields on the @@ -963,8 +971,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, address]) if request is not None and has_flattened_params: raise ValueError( @@ -972,10 +980,8 @@ def sample_delete(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteAddressRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteAddressRequest): request = compute.DeleteAddressRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1122,8 +1128,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, address]) if request is not None and has_flattened_params: raise ValueError( @@ -1131,10 +1137,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetAddressRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetAddressRequest): request = compute.GetAddressRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1250,8 +1254,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, address_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1259,10 +1263,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertAddressRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertAddressRequest): request = compute.InsertAddressRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1377,8 +1379,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, address_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1386,10 +1388,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertAddressRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertAddressRequest): request = compute.InsertAddressRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1526,8 +1526,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1535,10 +1535,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListAddressesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListAddressesRequest): request = compute.ListAddressesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1669,8 +1667,8 @@ def sample_move(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, address, region_addresses_move_request_resource] ) @@ -1680,10 +1678,8 @@ def sample_move(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.MoveAddressRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.MoveAddressRequest): request = compute.MoveAddressRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1812,8 +1808,8 @@ def sample_move(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, address, region_addresses_move_request_resource] ) @@ -1823,10 +1819,8 @@ def sample_move(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.MoveAddressRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.MoveAddressRequest): request = compute.MoveAddressRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1982,8 +1976,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, region_set_labels_request_resource] ) @@ -1993,10 +1987,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsAddressRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsAddressRequest): request = compute.SetLabelsAddressRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2127,8 +2119,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, region_set_labels_request_resource] ) @@ -2138,10 +2130,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsAddressRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetLabelsAddressRequest): request = compute.SetLabelsAddressRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/client.py index cdef7bc44172..1d80f46c23e6 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/autoscalers/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AutoscalersTransport]] = None, + transport: Optional[ + Union[str, AutoscalersTransport, Callable[..., AutoscalersTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, AutoscalersTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,AutoscalersTransport,Callable[..., AutoscalersTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the AutoscalersTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. 
@@ -627,8 +632,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[AutoscalersTransport], Callable[..., AutoscalersTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., AutoscalersTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -704,8 +716,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -713,10 +725,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListAutoscalersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListAutoscalersRequest): request = compute.AggregatedListAutoscalersRequest(request) # If we have keyword arguments corresponding to fields on the @@ -831,8 +841,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, autoscaler]) if request is not None and has_flattened_params: raise ValueError( @@ -840,10 +850,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteAutoscalerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteAutoscalerRequest): request = compute.DeleteAutoscalerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -959,8 +967,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, autoscaler]) if request is not None and has_flattened_params: raise ValueError( @@ -968,10 +976,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteAutoscalerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteAutoscalerRequest): request = compute.DeleteAutoscalerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1121,8 +1127,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, autoscaler]) if request is not None and has_flattened_params: raise ValueError( @@ -1130,10 +1136,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetAutoscalerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetAutoscalerRequest): request = compute.GetAutoscalerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1249,8 +1253,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, autoscaler_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1258,10 +1262,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertAutoscalerRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertAutoscalerRequest): request = compute.InsertAutoscalerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1376,8 +1378,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, autoscaler_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1385,10 +1387,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertAutoscalerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertAutoscalerRequest): request = compute.InsertAutoscalerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1526,8 +1526,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, zone]) if request is not None and has_flattened_params: raise ValueError( @@ -1535,10 +1535,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListAutoscalersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListAutoscalersRequest): request = compute.ListAutoscalersRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1662,8 +1660,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, autoscaler_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1671,10 +1669,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchAutoscalerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchAutoscalerRequest): request = compute.PatchAutoscalerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1791,8 +1787,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, autoscaler_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1800,10 +1796,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchAutoscalerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchAutoscalerRequest): request = compute.PatchAutoscalerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1943,8 +1937,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, autoscaler_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1952,10 +1946,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateAutoscalerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.UpdateAutoscalerRequest): request = compute.UpdateAutoscalerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2070,8 +2062,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, autoscaler_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2079,10 +2071,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateAutoscalerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.UpdateAutoscalerRequest): request = compute.UpdateAutoscalerRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py index ae98ff226801..9310bcee09e7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, BackendBucketsTransport]] = None, + transport: Optional[ + Union[str, BackendBucketsTransport, Callable[..., BackendBucketsTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +521,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, BackendBucketsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,BackendBucketsTransport,Callable[..., BackendBucketsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BackendBucketsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. 
@@ -632,8 +637,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[BackendBucketsTransport], Callable[..., BackendBucketsTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., BackendBucketsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -725,8 +737,8 @@ def sample_add_signed_url_key(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, backend_bucket, signed_url_key_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -734,10 +746,8 @@ def sample_add_signed_url_key(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddSignedUrlKeyBackendBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddSignedUrlKeyBackendBucketRequest): request = compute.AddSignedUrlKeyBackendBucketRequest(request) # If we have keyword arguments corresponding to fields on the @@ -858,8 +868,8 @@ def sample_add_signed_url_key(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, backend_bucket, signed_url_key_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -867,10 +877,8 @@ def sample_add_signed_url_key(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddSignedUrlKeyBackendBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddSignedUrlKeyBackendBucketRequest): request = compute.AddSignedUrlKeyBackendBucketRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1004,8 +1012,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, backend_bucket]) if request is not None and has_flattened_params: raise ValueError( @@ -1013,10 +1021,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteBackendBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteBackendBucketRequest): request = compute.DeleteBackendBucketRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1124,8 +1130,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, backend_bucket]) if request is not None and has_flattened_params: raise ValueError( @@ -1133,10 +1139,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteBackendBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteBackendBucketRequest): request = compute.DeleteBackendBucketRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1282,8 +1286,8 @@ def sample_delete_signed_url_key(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, backend_bucket, key_name]) if request is not None and has_flattened_params: raise ValueError( @@ -1291,10 +1295,8 @@ def sample_delete_signed_url_key(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteSignedUrlKeyBackendBucketRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteSignedUrlKeyBackendBucketRequest): request = compute.DeleteSignedUrlKeyBackendBucketRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1418,8 +1420,8 @@ def sample_delete_signed_url_key(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, backend_bucket, key_name]) if request is not None and has_flattened_params: raise ValueError( @@ -1427,10 +1429,8 @@ def sample_delete_signed_url_key(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteSignedUrlKeyBackendBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteSignedUrlKeyBackendBucketRequest): request = compute.DeleteSignedUrlKeyBackendBucketRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1567,8 +1567,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, backend_bucket]) if request is not None and has_flattened_params: raise ValueError( @@ -1576,10 +1576,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetBackendBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetBackendBucketRequest): request = compute.GetBackendBucketRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1708,8 +1706,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1717,10 +1715,8 @@ def sample_get_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetIamPolicyBackendBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetIamPolicyBackendBucketRequest): request = compute.GetIamPolicyBackendBucketRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1826,8 +1822,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, backend_bucket_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1835,10 +1831,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertBackendBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertBackendBucketRequest): request = compute.InsertBackendBucketRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1939,8 +1933,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, backend_bucket_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1948,10 +1942,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertBackendBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.InsertBackendBucketRequest): request = compute.InsertBackendBucketRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2074,8 +2066,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -2083,10 +2075,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListBackendBucketsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListBackendBucketsRequest): request = compute.ListBackendBucketsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2205,8 +2195,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, backend_bucket, backend_bucket_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2214,10 +2204,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchBackendBucketRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchBackendBucketRequest): request = compute.PatchBackendBucketRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2336,8 +2324,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, backend_bucket, backend_bucket_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2345,10 +2333,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchBackendBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchBackendBucketRequest): request = compute.PatchBackendBucketRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2494,8 +2480,8 @@ def sample_set_edge_security_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, backend_bucket, security_policy_reference_resource] ) @@ -2505,10 +2491,8 @@ def sample_set_edge_security_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetEdgeSecurityPolicyBackendBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetEdgeSecurityPolicyBackendBucketRequest): request = compute.SetEdgeSecurityPolicyBackendBucketRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2632,8 +2616,8 @@ def sample_set_edge_security_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, backend_bucket, security_policy_reference_resource] ) @@ -2643,10 +2627,8 @@ def sample_set_edge_security_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetEdgeSecurityPolicyBackendBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetEdgeSecurityPolicyBackendBucketRequest): request = compute.SetEdgeSecurityPolicyBackendBucketRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2811,8 +2793,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, global_set_policy_request_resource] ) @@ -2822,10 +2804,8 @@ def sample_set_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetIamPolicyBackendBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetIamPolicyBackendBucketRequest): request = compute.SetIamPolicyBackendBucketRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2946,8 +2926,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, test_permissions_request_resource] ) @@ -2957,10 +2937,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsBackendBucketRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.TestIamPermissionsBackendBucketRequest): request = compute.TestIamPermissionsBackendBucketRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3079,8 +3057,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, backend_bucket, backend_bucket_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -3088,10 +3066,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateBackendBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateBackendBucketRequest): request = compute.UpdateBackendBucketRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3208,8 +3184,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, backend_bucket, backend_bucket_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -3217,10 +3193,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateBackendBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateBackendBucketRequest): request = compute.UpdateBackendBucketRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py index fb698303a359..07a304e052f1 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_services/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, BackendServicesTransport]] = None, + transport: Optional[ + Union[ + str, BackendServicesTransport, Callable[..., BackendServicesTransport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +523,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. 
- transport (Union[str, BackendServicesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,BackendServicesTransport,Callable[..., BackendServicesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BackendServicesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -632,8 +639,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[BackendServicesTransport], Callable[..., BackendServicesTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., BackendServicesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -725,8 +739,8 @@ def sample_add_signed_url_key(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, backend_service, signed_url_key_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -734,10 +748,8 @@ def sample_add_signed_url_key(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddSignedUrlKeyBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddSignedUrlKeyBackendServiceRequest): request = compute.AddSignedUrlKeyBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -858,8 +870,8 @@ def sample_add_signed_url_key(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, backend_service, signed_url_key_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -867,10 +879,8 @@ def sample_add_signed_url_key(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddSignedUrlKeyBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddSignedUrlKeyBackendServiceRequest): request = compute.AddSignedUrlKeyBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1006,8 +1016,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1015,10 +1025,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListBackendServicesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListBackendServicesRequest): request = compute.AggregatedListBackendServicesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1128,8 +1136,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, backend_service]) if request is not None and has_flattened_params: raise ValueError( @@ -1137,10 +1145,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteBackendServiceRequest): request = compute.DeleteBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1248,8 +1254,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, backend_service]) if request is not None and has_flattened_params: raise ValueError( @@ -1257,10 +1263,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteBackendServiceRequest): request = compute.DeleteBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1406,8 +1410,8 @@ def sample_delete_signed_url_key(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, backend_service, key_name]) if request is not None and has_flattened_params: raise ValueError( @@ -1415,10 +1419,8 @@ def sample_delete_signed_url_key(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteSignedUrlKeyBackendServiceRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteSignedUrlKeyBackendServiceRequest): request = compute.DeleteSignedUrlKeyBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1542,8 +1544,8 @@ def sample_delete_signed_url_key(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, backend_service, key_name]) if request is not None and has_flattened_params: raise ValueError( @@ -1551,10 +1553,8 @@ def sample_delete_signed_url_key(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteSignedUrlKeyBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteSignedUrlKeyBackendServiceRequest): request = compute.DeleteSignedUrlKeyBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1701,8 +1701,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, backend_service]) if request is not None and has_flattened_params: raise ValueError( @@ -1710,10 +1710,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetBackendServiceRequest): request = compute.GetBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1829,8 +1827,8 @@ def sample_get_health(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, backend_service, resource_group_reference_resource] ) @@ -1840,10 +1838,8 @@ def sample_get_health(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetHealthBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetHealthBackendServiceRequest): request = compute.GetHealthBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1978,8 +1974,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1987,10 +1983,8 @@ def sample_get_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetIamPolicyBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetIamPolicyBackendServiceRequest): request = compute.GetIamPolicyBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2097,8 +2091,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, backend_service_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2106,10 +2100,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.InsertBackendServiceRequest): request = compute.InsertBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2211,8 +2203,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, backend_service_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2220,10 +2212,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertBackendServiceRequest): request = compute.InsertBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2346,8 +2336,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -2355,10 +2345,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListBackendServicesRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListBackendServicesRequest): request = compute.ListBackendServicesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2464,8 +2452,8 @@ def sample_list_usable(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -2473,10 +2461,8 @@ def sample_list_usable(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListUsableBackendServicesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListUsableBackendServicesRequest): request = compute.ListUsableBackendServicesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2596,8 +2582,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, backend_service, backend_service_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2605,10 +2591,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchBackendServiceRequest): request = compute.PatchBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2728,8 +2712,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, backend_service, backend_service_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2737,10 +2721,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchBackendServiceRequest): request = compute.PatchBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2887,8 +2869,8 @@ def sample_set_edge_security_policy(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, backend_service, security_policy_reference_resource] ) @@ -2898,10 +2880,8 @@ def sample_set_edge_security_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetEdgeSecurityPolicyBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetEdgeSecurityPolicyBackendServiceRequest): request = compute.SetEdgeSecurityPolicyBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3026,8 +3006,8 @@ def sample_set_edge_security_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, backend_service, security_policy_reference_resource] ) @@ -3037,10 +3017,8 @@ def sample_set_edge_security_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetEdgeSecurityPolicyBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetEdgeSecurityPolicyBackendServiceRequest): request = compute.SetEdgeSecurityPolicyBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3207,8 +3185,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, global_set_policy_request_resource] ) @@ -3218,10 +3196,8 @@ def sample_set_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetIamPolicyBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetIamPolicyBackendServiceRequest): request = compute.SetIamPolicyBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3346,8 +3322,8 @@ def sample_set_security_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, backend_service, security_policy_reference_resource] ) @@ -3357,10 +3333,8 @@ def sample_set_security_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetSecurityPolicyBackendServiceRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetSecurityPolicyBackendServiceRequest): request = compute.SetSecurityPolicyBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3485,8 +3459,8 @@ def sample_set_security_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, backend_service, security_policy_reference_resource] ) @@ -3496,10 +3470,8 @@ def sample_set_security_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetSecurityPolicyBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetSecurityPolicyBackendServiceRequest): request = compute.SetSecurityPolicyBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3644,8 +3616,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, resource, test_permissions_request_resource] ) @@ -3655,10 +3627,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.TestIamPermissionsBackendServiceRequest): request = compute.TestIamPermissionsBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3778,8 +3748,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, backend_service, backend_service_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -3787,10 +3757,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateBackendServiceRequest): request = compute.UpdateBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3908,8 +3876,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, backend_service, backend_service_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -3917,10 +3885,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateBackendServiceRequest): request = compute.UpdateBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/client.py index 82d9226a818c..7ffc5bd3dab0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/disk_types/client.py @@ -17,6 +17,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -501,7 +502,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DiskTypesTransport]] = None, + transport: Optional[ + Union[str, DiskTypesTransport, Callable[..., DiskTypesTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -513,9 +516,11 @@ def __init__( 
credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, DiskTypesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,DiskTypesTransport,Callable[..., DiskTypesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DiskTypesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -624,8 +629,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[DiskTypesTransport], Callable[..., DiskTypesTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DiskTypesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -701,8 +713,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -710,10 +722,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListDiskTypesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListDiskTypesRequest): request = compute.AggregatedListDiskTypesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -838,8 +848,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, disk_type]) if request is not None and has_flattened_params: raise ValueError( @@ -847,10 +857,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetDiskTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetDiskTypeRequest): request = compute.GetDiskTypeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -965,8 +973,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, zone]) if request is not None and has_flattened_params: raise ValueError( @@ -974,10 +982,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListDiskTypesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListDiskTypesRequest): request = compute.ListDiskTypesRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py index 0655f12549ff..262c57e654b1 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, DisksTransport]] = None, + transport: Optional[ + Union[str, DisksTransport, Callable[..., DisksTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, DisksTransport]): The - transport to use. If set to None, a transport is chosen - automatically. 
+ transport (Optional[Union[str,DisksTransport,Callable[..., DisksTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DisksTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -627,8 +632,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[DisksTransport], Callable[..., DisksTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DisksTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -726,8 +738,8 @@ def sample_add_resource_policies(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, disk, disks_add_resource_policies_request_resource] ) @@ -737,10 +749,8 @@ def sample_add_resource_policies(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddResourcePoliciesDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddResourcePoliciesDiskRequest): request = compute.AddResourcePoliciesDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -872,8 +882,8 @@ def sample_add_resource_policies(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, disk, disks_add_resource_policies_request_resource] ) @@ -883,10 +893,8 @@ def sample_add_resource_policies(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddResourcePoliciesDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddResourcePoliciesDiskRequest): request = compute.AddResourcePoliciesDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1021,8 +1029,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1030,10 +1038,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListDisksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListDisksRequest): request = compute.AggregatedListDisksRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1151,8 +1157,8 @@ def sample_bulk_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, bulk_insert_disk_resource_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1160,10 +1166,8 @@ def sample_bulk_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.BulkInsertDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.BulkInsertDiskRequest): request = compute.BulkInsertDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1283,8 +1287,8 @@ def sample_bulk_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, zone, bulk_insert_disk_resource_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1292,10 +1296,8 @@ def sample_bulk_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.BulkInsertDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.BulkInsertDiskRequest): request = compute.BulkInsertDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1451,8 +1453,8 @@ def sample_create_snapshot(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, disk, snapshot_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1460,10 +1462,8 @@ def sample_create_snapshot(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.CreateSnapshotDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.CreateSnapshotDiskRequest): request = compute.CreateSnapshotDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1595,8 +1595,8 @@ def sample_create_snapshot(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, disk, snapshot_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1604,10 +1604,8 @@ def sample_create_snapshot(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.CreateSnapshotDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.CreateSnapshotDiskRequest): request = compute.CreateSnapshotDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1757,8 +1755,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, disk]) if request is not None and has_flattened_params: raise ValueError( @@ -1766,10 +1764,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteDiskRequest): request = compute.DeleteDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1892,8 +1888,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, disk]) if request is not None and has_flattened_params: raise ValueError( @@ -1901,10 +1897,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteDiskRequest): request = compute.DeleteDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2058,8 +2052,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, disk]) if request is not None and has_flattened_params: raise ValueError( @@ -2067,10 +2061,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetDiskRequest): request = compute.GetDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2211,8 +2203,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2220,10 +2212,8 @@ def sample_get_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetIamPolicyDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetIamPolicyDiskRequest): request = compute.GetIamPolicyDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2345,8 +2335,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, disk_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2354,10 +2344,8 @@ def sample_insert(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertDiskRequest): request = compute.InsertDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2478,8 +2466,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, disk_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2487,10 +2475,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertDiskRequest): request = compute.InsertDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2629,8 +2615,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, zone]) if request is not None and has_flattened_params: raise ValueError( @@ -2638,10 +2624,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListDisksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListDisksRequest): request = compute.ListDisksRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2775,8 +2759,8 @@ def sample_remove_resource_policies(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, disk, disks_remove_resource_policies_request_resource] ) @@ -2786,10 +2770,8 @@ def sample_remove_resource_policies(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveResourcePoliciesDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.RemoveResourcePoliciesDiskRequest): request = compute.RemoveResourcePoliciesDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2921,8 +2903,8 @@ def sample_remove_resource_policies(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, disk, disks_remove_resource_policies_request_resource] ) @@ -2932,10 +2914,8 @@ def sample_remove_resource_policies(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveResourcePoliciesDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.RemoveResourcePoliciesDiskRequest): request = compute.RemoveResourcePoliciesDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3088,8 +3068,8 @@ def sample_resize(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, disk, disks_resize_request_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -3097,10 +3077,8 @@ def sample_resize(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ResizeDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ResizeDiskRequest): request = compute.ResizeDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3226,8 +3204,8 @@ def sample_resize(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, disk, disks_resize_request_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -3235,10 +3213,8 @@ def sample_resize(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ResizeDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ResizeDiskRequest): request = compute.ResizeDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3412,8 +3388,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, resource, zone_set_policy_request_resource] ) @@ -3423,10 +3399,8 @@ def sample_set_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetIamPolicyDiskRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetIamPolicyDiskRequest): request = compute.SetIamPolicyDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3557,8 +3531,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, resource, zone_set_labels_request_resource] ) @@ -3568,10 +3542,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsDiskRequest): request = compute.SetLabelsDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3702,8 +3674,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, resource, zone_set_labels_request_resource] ) @@ -3713,10 +3685,8 @@ def sample_set_labels(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsDiskRequest): request = compute.SetLabelsDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3872,8 +3842,8 @@ def sample_start_async_replication(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, disk, disks_start_async_replication_request_resource] ) @@ -3883,10 +3853,8 @@ def sample_start_async_replication(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.StartAsyncReplicationDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.StartAsyncReplicationDiskRequest): request = compute.StartAsyncReplicationDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4017,8 +3985,8 @@ def sample_start_async_replication(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, zone, disk, disks_start_async_replication_request_resource] ) @@ -4028,10 +3996,8 @@ def sample_start_async_replication(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.StartAsyncReplicationDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.StartAsyncReplicationDiskRequest): request = compute.StartAsyncReplicationDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4179,8 +4145,8 @@ def sample_stop_async_replication(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, disk]) if request is not None and has_flattened_params: raise ValueError( @@ -4188,10 +4154,8 @@ def sample_stop_async_replication(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.StopAsyncReplicationDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.StopAsyncReplicationDiskRequest): request = compute.StopAsyncReplicationDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4310,8 +4274,8 @@ def sample_stop_async_replication(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, disk]) if request is not None and has_flattened_params: raise ValueError( @@ -4319,10 +4283,8 @@ def sample_stop_async_replication(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.StopAsyncReplicationDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.StopAsyncReplicationDiskRequest): request = compute.StopAsyncReplicationDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4472,8 +4434,8 @@ def sample_stop_group_async_replication(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, disks_stop_group_async_replication_resource_resource] ) @@ -4483,10 +4445,8 @@ def sample_stop_group_async_replication(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.StopGroupAsyncReplicationDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.StopGroupAsyncReplicationDiskRequest): request = compute.StopGroupAsyncReplicationDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4614,8 +4574,8 @@ def sample_stop_group_async_replication(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, disks_stop_group_async_replication_resource_resource] ) @@ -4625,10 +4585,8 @@ def sample_stop_group_async_replication(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.StopGroupAsyncReplicationDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.StopGroupAsyncReplicationDiskRequest): request = compute.StopGroupAsyncReplicationDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4783,8 +4741,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, resource, test_permissions_request_resource] ) @@ -4794,10 +4752,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.TestIamPermissionsDiskRequest): request = compute.TestIamPermissionsDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4927,8 +4883,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, disk, disk_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -4936,10 +4892,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateDiskRequest): request = compute.UpdateDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -5067,8 +5021,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, zone, disk, disk_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -5076,10 +5030,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateDiskRequest): request = compute.UpdateDiskRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/client.py index 67c760796806..788e4946d22c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/external_vpn_gateways/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ExternalVpnGatewaysTransport]] = None, + transport: Optional[ + Union[ + str, + ExternalVpnGatewaysTransport, + Callable[..., ExternalVpnGatewaysTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +525,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. 
- transport (Union[str, ExternalVpnGatewaysTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,ExternalVpnGatewaysTransport,Callable[..., ExternalVpnGatewaysTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ExternalVpnGatewaysTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -632,8 +641,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[ExternalVpnGatewaysTransport], + Callable[..., ExternalVpnGatewaysTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ExternalVpnGatewaysTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -714,8 +731,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, external_vpn_gateway]) if request is not None and has_flattened_params: raise ValueError( @@ -723,10 +740,8 @@ def sample_delete(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteExternalVpnGatewayRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteExternalVpnGatewayRequest): request = compute.DeleteExternalVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the @@ -834,8 +849,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, external_vpn_gateway]) if request is not None and has_flattened_params: raise ValueError( @@ -843,10 +858,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteExternalVpnGatewayRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteExternalVpnGatewayRequest): request = compute.DeleteExternalVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the @@ -992,8 +1005,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, external_vpn_gateway]) if request is not None and has_flattened_params: raise ValueError( @@ -1001,10 +1014,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetExternalVpnGatewayRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetExternalVpnGatewayRequest): request = compute.GetExternalVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1110,8 +1121,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, external_vpn_gateway_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1119,10 +1130,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertExternalVpnGatewayRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertExternalVpnGatewayRequest): request = compute.InsertExternalVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1223,8 +1232,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, external_vpn_gateway_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1232,10 +1241,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertExternalVpnGatewayRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertExternalVpnGatewayRequest): request = compute.InsertExternalVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1358,8 +1365,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1367,10 +1374,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListExternalVpnGatewaysRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ListExternalVpnGatewaysRequest): request = compute.ListExternalVpnGatewaysRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1492,8 +1497,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, global_set_labels_request_resource] ) @@ -1503,10 +1508,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsExternalVpnGatewayRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsExternalVpnGatewayRequest): request = compute.SetLabelsExternalVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1630,8 +1633,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, global_set_labels_request_resource] ) @@ -1641,10 +1644,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsExternalVpnGatewayRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsExternalVpnGatewayRequest): request = compute.SetLabelsExternalVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1789,8 +1790,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, test_permissions_request_resource] ) @@ -1800,10 +1801,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsExternalVpnGatewayRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.TestIamPermissionsExternalVpnGatewayRequest): request = compute.TestIamPermissionsExternalVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py index c35810cb0d20..7bd1232b5498 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewall_policies/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, FirewallPoliciesTransport]] = None, + transport: Optional[ + Union[ + str, FirewallPoliciesTransport, Callable[..., FirewallPoliciesTransport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +523,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, FirewallPoliciesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,FirewallPoliciesTransport,Callable[..., FirewallPoliciesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the FirewallPoliciesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). 
We welcome your feedback via an issue in this library's source repository. @@ -632,8 +639,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[FirewallPoliciesTransport], + Callable[..., FirewallPoliciesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., FirewallPoliciesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -718,8 +733,8 @@ def sample_add_association(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [firewall_policy, firewall_policy_association_resource] ) @@ -729,10 +744,8 @@ def sample_add_association(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddAssociationFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddAssociationFirewallPolicyRequest): request = compute.AddAssociationFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -843,8 +856,8 @@ def sample_add_association(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [firewall_policy, firewall_policy_association_resource] ) @@ -854,10 +867,8 @@ def sample_add_association(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddAssociationFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddAssociationFirewallPolicyRequest): request = compute.AddAssociationFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -986,8 +997,8 @@ def sample_add_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([firewall_policy, firewall_policy_rule_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -995,10 +1006,8 @@ def sample_add_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddRuleFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.AddRuleFirewallPolicyRequest): request = compute.AddRuleFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1102,8 +1111,8 @@ def sample_add_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([firewall_policy, firewall_policy_rule_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1111,10 +1120,8 @@ def sample_add_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddRuleFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddRuleFirewallPolicyRequest): request = compute.AddRuleFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1235,8 +1242,8 @@ def sample_clone_rules(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1244,10 +1251,8 @@ def sample_clone_rules(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.CloneRulesFirewallPolicyRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.CloneRulesFirewallPolicyRequest): request = compute.CloneRulesFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1343,8 +1348,8 @@ def sample_clone_rules(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1352,10 +1357,8 @@ def sample_clone_rules(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.CloneRulesFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.CloneRulesFirewallPolicyRequest): request = compute.CloneRulesFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1474,8 +1477,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1483,10 +1486,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteFirewallPolicyRequest): request = compute.DeleteFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1582,8 +1583,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1591,10 +1592,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteFirewallPolicyRequest): request = compute.DeleteFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1711,8 +1710,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1720,10 +1719,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetFirewallPolicyRequest): request = compute.GetFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1819,8 +1816,8 @@ def sample_get_association(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1828,10 +1825,8 @@ def sample_get_association(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetAssociationFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.GetAssociationFirewallPolicyRequest): request = compute.GetAssociationFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1950,8 +1945,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1959,10 +1954,8 @@ def sample_get_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetIamPolicyFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetIamPolicyFirewallPolicyRequest): request = compute.GetIamPolicyFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2058,8 +2051,8 @@ def sample_get_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -2067,10 +2060,8 @@ def sample_get_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRuleFirewallPolicyRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRuleFirewallPolicyRequest): request = compute.GetRuleFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2175,8 +2166,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent_id, firewall_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2184,10 +2175,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertFirewallPolicyRequest): request = compute.InsertFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2286,8 +2275,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent_id, firewall_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2295,10 +2284,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertFirewallPolicyRequest): request = compute.InsertFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2406,10 +2393,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListFirewallPoliciesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListFirewallPoliciesRequest): request = compute.ListFirewallPoliciesRequest(request) @@ -2494,10 +2479,8 @@ def sample_list_associations(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListAssociationsFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ListAssociationsFirewallPolicyRequest): request = compute.ListAssociationsFirewallPolicyRequest(request) @@ -2592,8 +2575,8 @@ def sample_move(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([firewall_policy, parent_id]) if request is not None and has_flattened_params: raise ValueError( @@ -2601,10 +2584,8 @@ def sample_move(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.MoveFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.MoveFirewallPolicyRequest): request = compute.MoveFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2713,8 +2694,8 @@ def sample_move(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([firewall_policy, parent_id]) if request is not None and has_flattened_params: raise ValueError( @@ -2722,10 +2703,8 @@ def sample_move(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.MoveFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.MoveFirewallPolicyRequest): request = compute.MoveFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2854,8 +2833,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([firewall_policy, firewall_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2863,10 +2842,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchFirewallPolicyRequest): request = compute.PatchFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2971,8 +2948,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([firewall_policy, firewall_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2980,10 +2957,8 @@ def sample_patch(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchFirewallPolicyRequest): request = compute.PatchFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3110,8 +3085,8 @@ def sample_patch_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([firewall_policy, firewall_policy_rule_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -3119,10 +3094,8 @@ def sample_patch_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRuleFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRuleFirewallPolicyRequest): request = compute.PatchRuleFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3226,8 +3199,8 @@ def sample_patch_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([firewall_policy, firewall_policy_rule_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -3235,10 +3208,8 @@ def sample_patch_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRuleFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRuleFirewallPolicyRequest): request = compute.PatchRuleFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3362,8 +3333,8 @@ def sample_remove_association(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -3371,10 +3342,8 @@ def sample_remove_association(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveAssociationFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.RemoveAssociationFirewallPolicyRequest): request = compute.RemoveAssociationFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3473,8 +3442,8 @@ def sample_remove_association(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -3482,10 +3451,8 @@ def sample_remove_association(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveAssociationFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.RemoveAssociationFirewallPolicyRequest): request = compute.RemoveAssociationFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3604,8 +3571,8 @@ def sample_remove_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -3613,10 +3580,8 @@ def sample_remove_rule(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveRuleFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.RemoveRuleFirewallPolicyRequest): request = compute.RemoveRuleFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3712,8 +3677,8 @@ def sample_remove_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -3721,10 +3686,8 @@ def sample_remove_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveRuleFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.RemoveRuleFirewallPolicyRequest): request = compute.RemoveRuleFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3874,8 +3837,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [resource, global_organization_set_policy_request_resource] ) @@ -3885,10 +3848,8 @@ def sample_set_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetIamPolicyFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetIamPolicyFirewallPolicyRequest): request = compute.SetIamPolicyFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3995,8 +3956,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([resource, test_permissions_request_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -4004,10 +3965,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.TestIamPermissionsFirewallPolicyRequest): request = compute.TestIamPermissionsFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/client.py index 9d8e012194e7..adf034916956 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/firewalls/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, FirewallsTransport]] = None, + transport: Optional[ + Union[str, FirewallsTransport, Callable[..., FirewallsTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, FirewallsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,FirewallsTransport,Callable[..., FirewallsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the FirewallsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. 
@@ -627,8 +632,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[FirewallsTransport], Callable[..., FirewallsTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., FirewallsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -707,8 +719,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, firewall]) if request is not None and has_flattened_params: raise ValueError( @@ -716,10 +728,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteFirewallRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteFirewallRequest): request = compute.DeleteFirewallRequest(request) # If we have keyword arguments corresponding to fields on the @@ -825,8 +835,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, firewall]) if request is not None and has_flattened_params: raise ValueError( @@ -834,10 +844,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteFirewallRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteFirewallRequest): request = compute.DeleteFirewallRequest(request) # If we have keyword arguments corresponding to fields on the @@ -969,8 +977,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, firewall]) if request is not None and has_flattened_params: raise ValueError( @@ -978,10 +986,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetFirewallRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetFirewallRequest): request = compute.GetFirewallRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1087,8 +1093,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, firewall_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1096,10 +1102,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertFirewallRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertFirewallRequest): request = compute.InsertFirewallRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1200,8 +1204,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, firewall_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1209,10 +1213,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertFirewallRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.InsertFirewallRequest): request = compute.InsertFirewallRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1334,8 +1336,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1343,10 +1345,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListFirewallsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListFirewallsRequest): request = compute.ListFirewallsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1463,8 +1463,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, firewall, firewall_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1472,10 +1472,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchFirewallRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchFirewallRequest): request = compute.PatchFirewallRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1592,8 +1590,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, firewall, firewall_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1601,10 +1599,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchFirewallRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchFirewallRequest): request = compute.PatchFirewallRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1746,8 +1742,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, firewall, firewall_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1755,10 +1751,8 @@ def sample_update(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateFirewallRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateFirewallRequest): request = compute.UpdateFirewallRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1876,8 +1870,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, firewall, firewall_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1885,10 +1879,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateFirewallRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.UpdateFirewallRequest): request = compute.UpdateFirewallRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/client.py index 1d3596c810e3..6b4cb6ebc526 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/forwarding_rules/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ForwardingRulesTransport]] = None, + transport: Optional[ + Union[ + str, ForwardingRulesTransport, Callable[..., ForwardingRulesTransport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +523,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ForwardingRulesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,ForwardingRulesTransport,Callable[..., ForwardingRulesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ForwardingRulesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). 
We welcome your feedback via an issue in this library's source repository. @@ -632,8 +639,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[ForwardingRulesTransport], Callable[..., ForwardingRulesTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ForwardingRulesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -711,8 +725,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -720,10 +734,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListForwardingRulesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListForwardingRulesRequest): request = compute.AggregatedListForwardingRulesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -842,8 +854,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, forwarding_rule]) if request is not None and has_flattened_params: raise ValueError( @@ -851,10 +863,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteForwardingRuleRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteForwardingRuleRequest): request = compute.DeleteForwardingRuleRequest(request) # If we have keyword arguments corresponding to fields on the @@ -974,8 +984,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, forwarding_rule]) if request is not None and has_flattened_params: raise ValueError( @@ -983,10 +993,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteForwardingRuleRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteForwardingRuleRequest): request = compute.DeleteForwardingRuleRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1141,8 +1149,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, forwarding_rule]) if request is not None and has_flattened_params: raise ValueError( @@ -1150,10 +1158,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetForwardingRuleRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetForwardingRuleRequest): request = compute.GetForwardingRuleRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1272,8 +1278,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, forwarding_rule_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1281,10 +1287,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertForwardingRuleRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertForwardingRuleRequest): request = compute.InsertForwardingRuleRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1402,8 +1406,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, forwarding_rule_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1411,10 +1415,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertForwardingRuleRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertForwardingRuleRequest): request = compute.InsertForwardingRuleRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1554,8 +1556,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1563,10 +1565,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListForwardingRulesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListForwardingRulesRequest): request = compute.ListForwardingRulesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1701,8 +1701,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, forwarding_rule, forwarding_rule_resource] ) @@ -1712,10 +1712,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchForwardingRuleRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchForwardingRuleRequest): request = compute.PatchForwardingRuleRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1846,8 +1844,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, forwarding_rule, forwarding_rule_resource] ) @@ -1857,10 +1855,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchForwardingRuleRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchForwardingRuleRequest): request = compute.PatchForwardingRuleRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2015,8 +2011,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, region_set_labels_request_resource] ) @@ -2026,10 +2022,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsForwardingRuleRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetLabelsForwardingRuleRequest): request = compute.SetLabelsForwardingRuleRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2161,8 +2155,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, region_set_labels_request_resource] ) @@ -2172,10 +2166,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsForwardingRuleRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsForwardingRuleRequest): request = compute.SetLabelsForwardingRuleRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2331,8 +2323,8 @@ def sample_set_target(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, forwarding_rule, target_reference_resource] ) @@ -2342,10 +2334,8 @@ def sample_set_target(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetTargetForwardingRuleRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetTargetForwardingRuleRequest): request = compute.SetTargetForwardingRuleRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2474,8 +2464,8 @@ def sample_set_target(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, forwarding_rule, target_reference_resource] ) @@ -2485,10 +2475,8 @@ def sample_set_target(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetTargetForwardingRuleRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetTargetForwardingRuleRequest): request = compute.SetTargetForwardingRuleRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/client.py index b8c358d5acc2..3fc365cfa9bb 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_addresses/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, GlobalAddressesTransport]] = None, + transport: Optional[ + Union[ + str, GlobalAddressesTransport, Callable[..., GlobalAddressesTransport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +523,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, GlobalAddressesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,GlobalAddressesTransport,Callable[..., GlobalAddressesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the GlobalAddressesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). 
We welcome your feedback via an issue in this library's source repository. @@ -632,8 +639,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[GlobalAddressesTransport], Callable[..., GlobalAddressesTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., GlobalAddressesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -714,8 +728,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, address]) if request is not None and has_flattened_params: raise ValueError( @@ -723,10 +737,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteGlobalAddressRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteGlobalAddressRequest): request = compute.DeleteGlobalAddressRequest(request) # If we have keyword arguments corresponding to fields on the @@ -834,8 +846,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, address]) if request is not None and has_flattened_params: raise ValueError( @@ -843,10 +855,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteGlobalAddressRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteGlobalAddressRequest): request = compute.DeleteGlobalAddressRequest(request) # If we have keyword arguments corresponding to fields on the @@ -983,8 +993,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, address]) if request is not None and has_flattened_params: raise ValueError( @@ -992,10 +1002,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetGlobalAddressRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.GetGlobalAddressRequest): request = compute.GetGlobalAddressRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1101,8 +1109,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, address_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1110,10 +1118,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertGlobalAddressRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertGlobalAddressRequest): request = compute.InsertGlobalAddressRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1214,8 +1220,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, address_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1223,10 +1229,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertGlobalAddressRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertGlobalAddressRequest): request = compute.InsertGlobalAddressRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1348,8 +1352,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1357,10 +1361,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListGlobalAddressesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListGlobalAddressesRequest): request = compute.ListGlobalAddressesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1479,8 +1481,8 @@ def sample_move(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, address, global_addresses_move_request_resource] ) @@ -1490,10 +1492,8 @@ def sample_move(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.MoveGlobalAddressRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.MoveGlobalAddressRequest): request = compute.MoveGlobalAddressRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1614,8 +1614,8 @@ def sample_move(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, address, global_addresses_move_request_resource] ) @@ -1625,10 +1625,8 @@ def sample_move(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.MoveGlobalAddressRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.MoveGlobalAddressRequest): request = compute.MoveGlobalAddressRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1773,8 +1771,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, global_set_labels_request_resource] ) @@ -1784,10 +1782,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsGlobalAddressRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsGlobalAddressRequest): request = compute.SetLabelsGlobalAddressRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1908,8 +1904,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, global_set_labels_request_resource] ) @@ -1919,10 +1915,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsGlobalAddressRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetLabelsGlobalAddressRequest): request = compute.SetLabelsGlobalAddressRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/client.py index e656ae776824..51bf83bf4ab3 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_forwarding_rules/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -508,7 +509,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, GlobalForwardingRulesTransport]] = None, + transport: Optional[ + Union[ + str, + GlobalForwardingRulesTransport, + Callable[..., GlobalForwardingRulesTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -520,9 +527,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, GlobalForwardingRulesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,GlobalForwardingRulesTransport,Callable[..., GlobalForwardingRulesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the GlobalForwardingRulesTransport constructor. + If set to None, a transport is chosen automatically. 
NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -634,8 +643,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[GlobalForwardingRulesTransport], + Callable[..., GlobalForwardingRulesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., GlobalForwardingRulesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -718,8 +735,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, forwarding_rule]) if request is not None and has_flattened_params: raise ValueError( @@ -727,10 +744,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteGlobalForwardingRuleRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteGlobalForwardingRuleRequest): request = compute.DeleteGlobalForwardingRuleRequest(request) # If we have keyword arguments corresponding to fields on the @@ -840,8 +855,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, forwarding_rule]) if request is not None and has_flattened_params: raise ValueError( @@ -849,10 +864,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteGlobalForwardingRuleRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteGlobalForwardingRuleRequest): request = compute.DeleteGlobalForwardingRuleRequest(request) # If we have keyword arguments corresponding to fields on the @@ -996,8 +1009,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, forwarding_rule]) if request is not None and has_flattened_params: raise ValueError( @@ -1005,10 +1018,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetGlobalForwardingRuleRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.GetGlobalForwardingRuleRequest): request = compute.GetGlobalForwardingRuleRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1117,8 +1128,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, forwarding_rule_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1126,10 +1137,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertGlobalForwardingRuleRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertGlobalForwardingRuleRequest): request = compute.InsertGlobalForwardingRuleRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1233,8 +1242,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, forwarding_rule_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1242,10 +1251,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertGlobalForwardingRuleRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertGlobalForwardingRuleRequest): request = compute.InsertGlobalForwardingRuleRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1368,8 +1375,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1377,10 +1384,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListGlobalForwardingRulesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListGlobalForwardingRulesRequest): request = compute.ListGlobalForwardingRulesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1499,8 +1504,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, forwarding_rule, forwarding_rule_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1508,10 +1513,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchGlobalForwardingRuleRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchGlobalForwardingRuleRequest): request = compute.PatchGlobalForwardingRuleRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1630,8 +1633,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, forwarding_rule, forwarding_rule_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1639,10 +1642,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchGlobalForwardingRuleRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.PatchGlobalForwardingRuleRequest): request = compute.PatchGlobalForwardingRuleRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1788,8 +1789,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, global_set_labels_request_resource] ) @@ -1799,10 +1800,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsGlobalForwardingRuleRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsGlobalForwardingRuleRequest): request = compute.SetLabelsGlobalForwardingRuleRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1926,8 +1925,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, global_set_labels_request_resource] ) @@ -1937,10 +1936,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsGlobalForwardingRuleRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsGlobalForwardingRuleRequest): request = compute.SetLabelsGlobalForwardingRuleRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2086,8 +2083,8 @@ def sample_set_target(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, forwarding_rule, target_reference_resource] ) @@ -2097,10 +2094,8 @@ def sample_set_target(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetTargetGlobalForwardingRuleRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetTargetGlobalForwardingRuleRequest): request = compute.SetTargetGlobalForwardingRuleRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2220,8 +2215,8 @@ def sample_set_target(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, forwarding_rule, target_reference_resource] ) @@ -2231,10 +2226,8 @@ def sample_set_target(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetTargetGlobalForwardingRuleRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetTargetGlobalForwardingRuleRequest): request = compute.SetTargetGlobalForwardingRuleRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py index a57bc4c200ab..502ba90618c6 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -510,7 +511,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, GlobalNetworkEndpointGroupsTransport]] = None, + transport: Optional[ + Union[ + str, + GlobalNetworkEndpointGroupsTransport, + Callable[..., GlobalNetworkEndpointGroupsTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -522,9 +529,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the 
environment. - transport (Union[str, GlobalNetworkEndpointGroupsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,GlobalNetworkEndpointGroupsTransport,Callable[..., GlobalNetworkEndpointGroupsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the GlobalNetworkEndpointGroupsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -638,8 +647,18 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[GlobalNetworkEndpointGroupsTransport], + Callable[..., GlobalNetworkEndpointGroupsTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast( + Callable[..., GlobalNetworkEndpointGroupsTransport], transport + ) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -733,8 +752,8 @@ def sample_attach_network_endpoints(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -748,10 +767,8 @@ def sample_attach_network_endpoints(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest ): @@ -883,8 +900,8 @@ def sample_attach_network_endpoints(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -898,10 +915,8 @@ def sample_attach_network_endpoints(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest ): @@ -1049,8 +1064,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, network_endpoint_group]) if request is not None and has_flattened_params: raise ValueError( @@ -1058,10 +1073,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteGlobalNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteGlobalNetworkEndpointGroupRequest): request = compute.DeleteGlobalNetworkEndpointGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1174,8 +1187,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, network_endpoint_group]) if request is not None and has_flattened_params: raise ValueError( @@ -1183,10 +1196,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteGlobalNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteGlobalNetworkEndpointGroupRequest): request = compute.DeleteGlobalNetworkEndpointGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1331,8 +1342,8 @@ def sample_detach_network_endpoints(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1346,10 +1357,8 @@ def sample_detach_network_endpoints(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest ): @@ -1481,8 +1490,8 @@ def sample_detach_network_endpoints(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1496,10 +1505,8 @@ def sample_detach_network_endpoints(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest ): @@ -1650,8 +1657,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, network_endpoint_group]) if request is not None and has_flattened_params: raise ValueError( @@ -1659,10 +1666,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetGlobalNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetGlobalNetworkEndpointGroupRequest): request = compute.GetGlobalNetworkEndpointGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1771,8 +1776,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, network_endpoint_group_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1780,10 +1785,8 @@ def sample_insert(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertGlobalNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertGlobalNetworkEndpointGroupRequest): request = compute.InsertGlobalNetworkEndpointGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1889,8 +1892,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, network_endpoint_group_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1898,10 +1901,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertGlobalNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertGlobalNetworkEndpointGroupRequest): request = compute.InsertGlobalNetworkEndpointGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2026,8 +2027,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -2035,10 +2036,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListGlobalNetworkEndpointGroupsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListGlobalNetworkEndpointGroupsRequest): request = compute.ListGlobalNetworkEndpointGroupsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2155,8 +2154,8 @@ def sample_list_network_endpoints(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, network_endpoint_group]) if request is not None and has_flattened_params: raise ValueError( @@ -2164,10 +2163,8 @@ def sample_list_network_endpoints(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance( request, compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest ): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py index fd7a58c1a217..61f7f00c030e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_operations/client.py @@ -17,6 +17,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -503,7 +504,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, GlobalOperationsTransport]] = None, + transport: Optional[ + Union[ + str, GlobalOperationsTransport, Callable[..., GlobalOperationsTransport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -515,9 +520,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, GlobalOperationsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,GlobalOperationsTransport,Callable[..., GlobalOperationsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the GlobalOperationsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. 
@@ -629,8 +636,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[GlobalOperationsTransport], + Callable[..., GlobalOperationsTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., GlobalOperationsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -708,8 +723,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -717,10 +732,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListGlobalOperationsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListGlobalOperationsRequest): request = compute.AggregatedListGlobalOperationsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -831,8 +844,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, operation]) if request is not None and has_flattened_params: raise ValueError( @@ -840,10 +853,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteGlobalOperationRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteGlobalOperationRequest): request = compute.DeleteGlobalOperationRequest(request) # If we have keyword arguments corresponding to fields on the @@ -966,8 +977,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, operation]) if request is not None and has_flattened_params: raise ValueError( @@ -975,10 +986,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetGlobalOperationRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.GetGlobalOperationRequest): request = compute.GetGlobalOperationRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1082,8 +1091,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1091,10 +1100,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListGlobalOperationsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListGlobalOperationsRequest): request = compute.ListGlobalOperationsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1231,8 +1238,8 @@ def sample_wait(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, operation]) if request is not None and has_flattened_params: raise ValueError( @@ -1240,10 +1247,8 @@ def sample_wait(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.WaitGlobalOperationRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.WaitGlobalOperationRequest): request = compute.WaitGlobalOperationRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py index 7f2f5c51dc1b..7e92e9039b38 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_organization_operations/client.py @@ -17,6 +17,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -507,7 +508,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, GlobalOrganizationOperationsTransport]] = None, + transport: Optional[ + Union[ + str, + GlobalOrganizationOperationsTransport, + Callable[..., GlobalOrganizationOperationsTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -519,9 +526,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, GlobalOrganizationOperationsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. 
+ transport (Optional[Union[str,GlobalOrganizationOperationsTransport,Callable[..., GlobalOrganizationOperationsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the GlobalOrganizationOperationsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -637,8 +646,18 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[GlobalOrganizationOperationsTransport], + Callable[..., GlobalOrganizationOperationsTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast( + Callable[..., GlobalOrganizationOperationsTransport], transport + ) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -715,8 +734,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([operation]) if request is not None and has_flattened_params: raise ValueError( @@ -724,10 +743,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteGlobalOrganizationOperationRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteGlobalOrganizationOperationRequest): request = compute.DeleteGlobalOrganizationOperationRequest(request) # If we have keyword arguments corresponding to fields on the @@ -841,8 +858,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([operation]) if request is not None and has_flattened_params: raise ValueError( @@ -850,10 +867,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetGlobalOrganizationOperationRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetGlobalOrganizationOperationRequest): request = compute.GetGlobalOrganizationOperationRequest(request) # If we have keyword arguments corresponding to fields on the @@ -947,10 +962,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListGlobalOrganizationOperationsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ListGlobalOrganizationOperationsRequest): request = compute.ListGlobalOrganizationOperationsRequest(request) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py index 7acd6757b6f6..4569963cc063 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -510,7 +511,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, GlobalPublicDelegatedPrefixesTransport]] = None, + transport: Optional[ + Union[ + str, + GlobalPublicDelegatedPrefixesTransport, + Callable[..., GlobalPublicDelegatedPrefixesTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -522,9 +529,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, GlobalPublicDelegatedPrefixesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,GlobalPublicDelegatedPrefixesTransport,Callable[..., GlobalPublicDelegatedPrefixesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the GlobalPublicDelegatedPrefixesTransport constructor. + If set to None, a transport is chosen automatically. 
NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -642,8 +651,18 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[GlobalPublicDelegatedPrefixesTransport], + Callable[..., GlobalPublicDelegatedPrefixesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast( + Callable[..., GlobalPublicDelegatedPrefixesTransport], transport + ) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -726,8 +745,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, public_delegated_prefix]) if request is not None and has_flattened_params: raise ValueError( @@ -735,10 +754,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteGlobalPublicDelegatedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteGlobalPublicDelegatedPrefixeRequest): request = compute.DeleteGlobalPublicDelegatedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -848,8 +865,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, public_delegated_prefix]) if request is not None and has_flattened_params: raise ValueError( @@ -857,10 +874,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteGlobalPublicDelegatedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteGlobalPublicDelegatedPrefixeRequest): request = compute.DeleteGlobalPublicDelegatedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1002,8 +1017,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, public_delegated_prefix]) if request is not None and has_flattened_params: raise ValueError( @@ -1011,10 +1026,8 @@ def sample_get(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetGlobalPublicDelegatedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetGlobalPublicDelegatedPrefixeRequest): request = compute.GetGlobalPublicDelegatedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1125,8 +1138,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, public_delegated_prefix_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1134,10 +1147,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertGlobalPublicDelegatedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertGlobalPublicDelegatedPrefixeRequest): request = compute.InsertGlobalPublicDelegatedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1245,8 +1256,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, public_delegated_prefix_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1254,10 +1265,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertGlobalPublicDelegatedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertGlobalPublicDelegatedPrefixeRequest): request = compute.InsertGlobalPublicDelegatedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1382,8 +1391,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1391,10 +1400,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListGlobalPublicDelegatedPrefixesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ListGlobalPublicDelegatedPrefixesRequest): request = compute.ListGlobalPublicDelegatedPrefixesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1517,8 +1524,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, public_delegated_prefix, public_delegated_prefix_resource] ) @@ -1528,10 +1535,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchGlobalPublicDelegatedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchGlobalPublicDelegatedPrefixeRequest): request = compute.PatchGlobalPublicDelegatedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1656,8 +1661,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, public_delegated_prefix, public_delegated_prefix_resource] ) @@ -1667,10 +1672,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchGlobalPublicDelegatedPrefixeRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchGlobalPublicDelegatedPrefixeRequest): request = compute.PatchGlobalPublicDelegatedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py index 1da3c2381e76..839c0e906ba7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/health_checks/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, HealthChecksTransport]] = None, + transport: Optional[ + Union[str, HealthChecksTransport, Callable[..., HealthChecksTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, HealthChecksTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,HealthChecksTransport,Callable[..., HealthChecksTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. 
+ If a Callable is given, it will be called with the same set of initialization + arguments as used in the HealthChecksTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -627,8 +632,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[HealthChecksTransport], Callable[..., HealthChecksTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., HealthChecksTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -709,8 +721,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -718,10 +730,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListHealthChecksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.AggregatedListHealthChecksRequest): request = compute.AggregatedListHealthChecksRequest(request) # If we have keyword arguments corresponding to fields on the @@ -831,8 +841,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, health_check]) if request is not None and has_flattened_params: raise ValueError( @@ -840,10 +850,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteHealthCheckRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteHealthCheckRequest): request = compute.DeleteHealthCheckRequest(request) # If we have keyword arguments corresponding to fields on the @@ -951,8 +959,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, health_check]) if request is not None and has_flattened_params: raise ValueError( @@ -960,10 +968,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteHealthCheckRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteHealthCheckRequest): request = compute.DeleteHealthCheckRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1110,8 +1116,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, health_check]) if request is not None and has_flattened_params: raise ValueError( @@ -1119,10 +1125,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetHealthCheckRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetHealthCheckRequest): request = compute.GetHealthCheckRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1228,8 +1232,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, health_check_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1237,10 +1241,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertHealthCheckRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertHealthCheckRequest): request = compute.InsertHealthCheckRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1341,8 +1343,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, health_check_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1350,10 +1352,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertHealthCheckRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertHealthCheckRequest): request = compute.InsertHealthCheckRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1476,8 +1476,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1485,10 +1485,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListHealthChecksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListHealthChecksRequest): request = compute.ListHealthChecksRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1607,8 +1605,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, health_check, health_check_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1616,10 +1614,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchHealthCheckRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.PatchHealthCheckRequest): request = compute.PatchHealthCheckRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1738,8 +1734,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, health_check, health_check_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1747,10 +1743,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchHealthCheckRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchHealthCheckRequest): request = compute.PatchHealthCheckRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1891,8 +1885,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, health_check, health_check_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1900,10 +1894,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateHealthCheckRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateHealthCheckRequest): request = compute.UpdateHealthCheckRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2020,8 +2012,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, health_check, health_check_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2029,10 +2021,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateHealthCheckRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.UpdateHealthCheckRequest): request = compute.UpdateHealthCheckRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/client.py index afe04f020957..29aa1952847b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/image_family_views/client.py @@ -17,6 +17,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -502,7 +503,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ImageFamilyViewsTransport]] = None, + transport: Optional[ + Union[ + str, ImageFamilyViewsTransport, Callable[..., ImageFamilyViewsTransport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -514,9 +519,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ImageFamilyViewsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,ImageFamilyViewsTransport,Callable[..., ImageFamilyViewsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ImageFamilyViewsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). 
We welcome your feedback via an issue in this library's source repository. @@ -628,8 +635,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[ImageFamilyViewsTransport], + Callable[..., ImageFamilyViewsTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ImageFamilyViewsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -719,8 +734,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, family]) if request is not None and has_flattened_params: raise ValueError( @@ -728,10 +743,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetImageFamilyViewRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.GetImageFamilyViewRequest): request = compute.GetImageFamilyViewRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py index ba7e6c6c8c04..c1adbaa5e944 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/images/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ImagesTransport]] = None, + transport: Optional[ + Union[str, ImagesTransport, Callable[..., ImagesTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ImagesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,ImagesTransport,Callable[..., ImagesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ImagesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. 
@@ -627,8 +632,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[ImagesTransport], Callable[..., ImagesTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ImagesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -706,8 +718,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, image]) if request is not None and has_flattened_params: raise ValueError( @@ -715,10 +727,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteImageRequest): request = compute.DeleteImageRequest(request) # If we have keyword arguments corresponding to fields on the @@ -823,8 +833,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, image]) if request is not None and has_flattened_params: raise ValueError( @@ -832,10 +842,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteImageRequest): request = compute.DeleteImageRequest(request) # If we have keyword arguments corresponding to fields on the @@ -973,8 +981,8 @@ def sample_deprecate(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, image, deprecation_status_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -982,10 +990,8 @@ def sample_deprecate(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeprecateImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeprecateImageRequest): request = compute.DeprecateImageRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1101,8 +1107,8 @@ def sample_deprecate(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, image, deprecation_status_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1110,10 +1116,8 @@ def sample_deprecate(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeprecateImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeprecateImageRequest): request = compute.DeprecateImageRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1246,8 +1250,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, image]) if request is not None and has_flattened_params: raise ValueError( @@ -1255,10 +1259,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.GetImageRequest): request = compute.GetImageRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1374,8 +1376,8 @@ def sample_get_from_family(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, family]) if request is not None and has_flattened_params: raise ValueError( @@ -1383,10 +1385,8 @@ def sample_get_from_family(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetFromFamilyImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetFromFamilyImageRequest): request = compute.GetFromFamilyImageRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1515,8 +1515,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1524,10 +1524,8 @@ def sample_get_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetIamPolicyImageRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetIamPolicyImageRequest): request = compute.GetIamPolicyImageRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1632,8 +1630,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, image_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1641,10 +1639,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertImageRequest): request = compute.InsertImageRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1744,8 +1740,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, image_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1753,10 +1749,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertImageRequest): request = compute.InsertImageRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1884,8 +1878,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1893,10 +1887,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListImagesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListImagesRequest): request = compute.ListImagesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2011,8 +2003,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, image, image_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2020,10 +2012,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchImageRequest): request = compute.PatchImageRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2138,8 +2128,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, image, image_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2147,10 +2137,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.PatchImageRequest): request = compute.PatchImageRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2313,8 +2301,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, global_set_policy_request_resource] ) @@ -2324,10 +2312,8 @@ def sample_set_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetIamPolicyImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetIamPolicyImageRequest): request = compute.SetIamPolicyImageRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2448,8 +2434,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, global_set_labels_request_resource] ) @@ -2459,10 +2445,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsImageRequest): request = compute.SetLabelsImageRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2583,8 +2567,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, global_set_labels_request_resource] ) @@ -2594,10 +2578,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsImageRequest): request = compute.SetLabelsImageRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2740,8 +2722,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, test_permissions_request_resource] ) @@ -2751,10 +2733,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.TestIamPermissionsImageRequest): request = compute.TestIamPermissionsImageRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/__init__.py new file mode 100644 index 000000000000..f2febe6d66ea --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import InstanceGroupManagerResizeRequestsClient + +__all__ = ("InstanceGroupManagerResizeRequestsClient",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/client.py new file mode 100644 index 000000000000..076049e74656 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/client.py @@ -0,0 +1,2019 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation, gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import extended_operation # type: ignore + +from google.cloud.compute_v1.services.instance_group_manager_resize_requests import ( + pagers, +) +from google.cloud.compute_v1.types import compute + +from .transports.base import ( + DEFAULT_CLIENT_INFO, + InstanceGroupManagerResizeRequestsTransport, +) +from .transports.rest import InstanceGroupManagerResizeRequestsRestTransport + + +class InstanceGroupManagerResizeRequestsClientMeta(type): + """Metaclass for the InstanceGroupManagerResizeRequests client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[InstanceGroupManagerResizeRequestsTransport]] + _transport_registry["rest"] = InstanceGroupManagerResizeRequestsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[InstanceGroupManagerResizeRequestsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class InstanceGroupManagerResizeRequestsClient( + metaclass=InstanceGroupManagerResizeRequestsClientMeta +): + """The InstanceGroupManagerResizeRequests API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "compute.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InstanceGroupManagerResizeRequestsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InstanceGroupManagerResizeRequestsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> InstanceGroupManagerResizeRequestsTransport: + """Returns the transport used by the client instance. + + Returns: + InstanceGroupManagerResizeRequestsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + 
) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.",
            DeprecationWarning,
        )
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        if use_client_cert not in ("true", "false"):
            raise ValueError(
                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
            )
        if use_mtls_endpoint not in ("auto", "never", "always"):
            raise MutualTLSChannelError(
                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
            )

        # Figure out the client cert source to use.
        client_cert_source = None
        if use_client_cert == "true":
            if client_options.client_cert_source:
                client_cert_source = client_options.client_cert_source
            elif mtls.has_default_client_cert_source():
                client_cert_source = mtls.default_client_cert_source()

        # Figure out which api endpoint to use.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        elif use_mtls_endpoint == "always" or (
            use_mtls_endpoint == "auto" and client_cert_source
        ):
            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
        else:
            api_endpoint = cls.DEFAULT_ENDPOINT

        return api_endpoint, client_cert_source

    @staticmethod
    def _read_environment_variables() -> Tuple[bool, str, Optional[str]]:
        """Returns the environment variables used by the client.

        Returns:
            Tuple[bool, str, Optional[str]]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment
            variables. The last element is ``None`` when GOOGLE_CLOUD_UNIVERSE_DOMAIN
            is not set.

        Raises:
            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
                any of ["true", "false"].
            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
                is not any of ["auto", "never", "always"].
        """
        # Values are lower-cased so the comparison below is case-insensitive.
        use_client_cert = os.getenv(
            "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"
        ).lower()
        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
        if use_client_cert not in ("true", "false"):
            raise ValueError(
                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
            )
        if use_mtls_endpoint not in ("auto", "never", "always"):
            raise MutualTLSChannelError(
                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
            )
        return use_client_cert == "true", use_mtls_endpoint, universe_domain_env

    @staticmethod
    def _get_client_cert_source(provided_cert_source, use_cert_flag: bool):
        """Return the client cert source to be used by the client.

        Args:
            provided_cert_source (bytes): The client certificate source provided.
            use_cert_flag (bool): A flag indicating whether to use the client certificate.

        Returns:
            bytes or None: The client cert source to be used by the client.
            ``None`` when ``use_cert_flag`` is falsy or no source is available.
        """
        client_cert_source = None
        if use_cert_flag:
            # An explicitly provided source wins over the ADC default source.
            if provided_cert_source:
                client_cert_source = provided_cert_source
            elif mtls.has_default_client_cert_source():
                client_cert_source = mtls.default_client_cert_source()
        return client_cert_source

    @staticmethod
    def _get_api_endpoint(
        api_override, client_cert_source, universe_domain, use_mtls_endpoint
    ):
        """Return the API endpoint used by the client.

        Args:
            api_override (str): The API endpoint override. If specified, this is always
                the return value of this function and the other arguments are not used.
            client_cert_source (bytes): The client certificate source used by the client.
            universe_domain (str): The universe domain used by the client.
            use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters.
                Possible values are "always", "auto", or "never".

        Returns:
            str: The API endpoint to be used by the client.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If the mTLS endpoint is
                selected for a universe other than the default one.
        """
        if api_override is not None:
            api_endpoint = api_override
        elif use_mtls_endpoint == "always" or (
            use_mtls_endpoint == "auto" and client_cert_source
        ):
            _default_universe = (
                InstanceGroupManagerResizeRequestsClient._DEFAULT_UNIVERSE
            )
            # mTLS is only available in the default (googleapis.com) universe.
            if universe_domain != _default_universe:
                raise MutualTLSChannelError(
                    f"mTLS is not supported in any universe other than {_default_universe}."
                )
            api_endpoint = (
                InstanceGroupManagerResizeRequestsClient.DEFAULT_MTLS_ENDPOINT
            )
        else:
            api_endpoint = InstanceGroupManagerResizeRequestsClient._DEFAULT_ENDPOINT_TEMPLATE.format(
                UNIVERSE_DOMAIN=universe_domain
            )
        return api_endpoint

    @staticmethod
    def _get_universe_domain(
        client_universe_domain: Optional[str], universe_domain_env: Optional[str]
    ) -> str:
        """Return the universe domain used by the client.

        Precedence: client options value, then the environment variable,
        then the library default.

        Args:
            client_universe_domain (Optional[str]): The universe domain configured via the client options.
            universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.

        Returns:
            str: The universe domain to be used by the client.

        Raises:
            ValueError: If the universe domain is an empty string.
        """
        universe_domain = InstanceGroupManagerResizeRequestsClient._DEFAULT_UNIVERSE
        if client_universe_domain is not None:
            universe_domain = client_universe_domain
        elif universe_domain_env is not None:
            universe_domain = universe_domain_env
        if len(universe_domain.strip()) == 0:
            raise ValueError("Universe Domain cannot be an empty string.")
        return universe_domain

    @staticmethod
    def _compare_universes(
        client_universe: str, credentials: ga_credentials.Credentials
    ) -> bool:
        """Returns True iff the universe domains used by the client and credentials match.

        Args:
            client_universe (str): The universe domain configured via the client options.
            credentials (ga_credentials.Credentials): The credentials being used in the client.

        Returns:
            bool: True iff client_universe matches the universe in credentials.

        Raises:
            ValueError: when client_universe does not match the universe in credentials.
        """
        # Credentials that predate universe-domain support are assumed to be in
        # the default universe.
        default_universe = InstanceGroupManagerResizeRequestsClient._DEFAULT_UNIVERSE
        credentials_universe = getattr(credentials, "universe_domain", default_universe)

        if client_universe != credentials_universe:
            raise ValueError(
                "The configured universe domain "
                f"({client_universe}) does not match the universe domain "
                f"found in the credentials ({credentials_universe}). "
                "If you haven't configured the universe domain explicitly, "
                f"`{default_universe}` is the default."
            )
        return True

    def _validate_universe_domain(self) -> bool:
        """Validates client's and credentials' universe domains are consistent.

        The result is cached on the instance, so the (potentially raising)
        comparison runs at most once per client.

        Returns:
            bool: True iff the configured universe domain is valid.

        Raises:
            ValueError: If the configured universe domain is not valid.
        """
        # NOTE(review): reaches into the transport's private `_credentials`
        # attribute; generated code relies on this attribute being present.
        self._is_universe_domain_valid = (
            self._is_universe_domain_valid
            or InstanceGroupManagerResizeRequestsClient._compare_universes(
                self.universe_domain, self.transport._credentials
            )
        )
        return self._is_universe_domain_valid

    @property
    def api_endpoint(self) -> str:
        """Return the API endpoint used by the client instance.

        Returns:
            str: The API endpoint used by the client instance.
        """
        return self._api_endpoint

    @property
    def universe_domain(self) -> str:
        """Return the universe domain used by the client instance.

        Returns:
            str: The universe domain used by the client instance.
+ """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + InstanceGroupManagerResizeRequestsTransport, + Callable[..., InstanceGroupManagerResizeRequestsTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the instance group manager resize requests client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,InstanceGroupManagerResizeRequestsTransport,Callable[..., InstanceGroupManagerResizeRequestsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the InstanceGroupManagerResizeRequestsTransport constructor. + If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = InstanceGroupManagerResizeRequestsClient._read_environment_variables() + self._client_cert_source = ( + InstanceGroupManagerResizeRequestsClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = ( + InstanceGroupManagerResizeRequestsClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance( + transport, InstanceGroupManagerResizeRequestsTransport + ) + if transport_provided: + # transport is a InstanceGroupManagerResizeRequestsTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast( + InstanceGroupManagerResizeRequestsTransport, transport + ) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or InstanceGroupManagerResizeRequestsClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[InstanceGroupManagerResizeRequestsTransport], + Callable[..., InstanceGroupManagerResizeRequestsTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast( + Callable[..., InstanceGroupManagerResizeRequestsTransport], + transport, + ) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def cancel_unary( + self, + request: Optional[ + Union[compute.CancelInstanceGroupManagerResizeRequestRequest, dict] + ] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + resize_request: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Cancels the specified resize request and removes it + from the queue. 
        A cancelled resize request no longer
        waits for the resources to be provisioned. Cancel is only
        possible for requests that are accepted in the queue.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_cancel():
                # Create a client
                client = compute_v1.InstanceGroupManagerResizeRequestsClient()

                # Initialize request argument(s)
                request = compute_v1.CancelInstanceGroupManagerResizeRequestRequest(
                    instance_group_manager="instance_group_manager_value",
                    project="project_value",
                    resize_request="resize_request_value",
                    zone="zone_value",
                )

                # Make the request
                response = client.cancel(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.CancelInstanceGroupManagerResizeRequestRequest, dict]):
                The request object. A request message for
                InstanceGroupManagerResizeRequests.Cancel.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            zone (str):
                The name of the zone where the
                managed instance group is located. The
                name should conform to RFC1035.

                This corresponds to the ``zone`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager (str):
                The name of the managed instance
                group. The name should conform to
                RFC1035 or be a resource ID.

                This corresponds to the ``instance_group_manager`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            resize_request (str):
                The name of the resize request to
                cancel. The name should conform to
                RFC1035 or be a resource ID.

                This corresponds to the ``resize_request`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any(
            [project, zone, instance_group_manager, resize_request]
        )
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(
            request, compute.CancelInstanceGroupManagerResizeRequestRequest
        ):
            request = compute.CancelInstanceGroupManagerResizeRequestRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
        if project is not None:
            request.project = project
        if zone is not None:
            request.zone = zone
        if instance_group_manager is not None:
            request.instance_group_manager = instance_group_manager
        if resize_request is not None:
            request.resize_request = resize_request

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.cancel]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (
                    ("project", request.project),
                    ("zone", request.zone),
                    ("instance_group_manager", request.instance_group_manager),
                    ("resize_request", request.resize_request),
                )
            ),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def cancel(
        self,
        request: Optional[
            Union[compute.CancelInstanceGroupManagerResizeRequestRequest, dict]
        ] = None,
        *,
        project: Optional[str] = None,
        zone: Optional[str] = None,
        instance_group_manager: Optional[str] = None,
        resize_request: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> extended_operation.ExtendedOperation:
        r"""Cancels the specified resize request and removes it
        from the queue. A cancelled resize request no longer
        waits for the resources to be provisioned. Cancel is only
        possible for requests that are accepted in the queue.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_cancel():
                # Create a client
                client = compute_v1.InstanceGroupManagerResizeRequestsClient()

                # Initialize request argument(s)
                request = compute_v1.CancelInstanceGroupManagerResizeRequestRequest(
                    instance_group_manager="instance_group_manager_value",
                    project="project_value",
                    resize_request="resize_request_value",
                    zone="zone_value",
                )

                # Make the request
                response = client.cancel(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.CancelInstanceGroupManagerResizeRequestRequest, dict]):
                The request object. A request message for
                InstanceGroupManagerResizeRequests.Cancel.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            zone (str):
                The name of the zone where the
                managed instance group is located. The
                name should conform to RFC1035.

                This corresponds to the ``zone`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager (str):
                The name of the managed instance
                group. The name should conform to
                RFC1035 or be a resource ID.

                This corresponds to the ``instance_group_manager`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            resize_request (str):
                The name of the resize request to
                cancel. The name should conform to
                RFC1035 or be a resource ID.

                This corresponds to the ``resize_request`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any(
            [project, zone, instance_group_manager, resize_request]
        )
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(
            request, compute.CancelInstanceGroupManagerResizeRequestRequest
        ):
            request = compute.CancelInstanceGroupManagerResizeRequestRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
        if project is not None:
            request.project = project
        if zone is not None:
            request.zone = zone
        if instance_group_manager is not None:
            request.instance_group_manager = instance_group_manager
        if resize_request is not None:
            request.resize_request = resize_request

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.cancel]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (
                    ("project", request.project),
                    ("zone", request.zone),
                    ("instance_group_manager", request.instance_group_manager),
                    ("resize_request", request.resize_request),
                )
            ),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the bare Operation in an ExtendedOperation that polls the
        # zone operations service for completion.
        operation_service = self._transport._zone_operations_client
        operation_request = compute.GetZoneOperationRequest()
        operation_request.project = request.project
        operation_request.zone = request.zone
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def delete_unary(
        self,
        request: Optional[
            Union[compute.DeleteInstanceGroupManagerResizeRequestRequest, dict]
        ] = None,
        *,
        project: Optional[str] = None,
        zone: Optional[str] = None,
        instance_group_manager: Optional[str] = None,
        resize_request: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> compute.Operation:
        r"""Deletes the specified, inactive resize request.
        Requests that are still active cannot be deleted.
        Deleting a request does not delete instances that were
        provisioned previously.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_delete():
                # Create a client
                client = compute_v1.InstanceGroupManagerResizeRequestsClient()

                # Initialize request argument(s)
                request = compute_v1.DeleteInstanceGroupManagerResizeRequestRequest(
                    instance_group_manager="instance_group_manager_value",
                    project="project_value",
                    resize_request="resize_request_value",
                    zone="zone_value",
                )

                # Make the request
                response = client.delete(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteInstanceGroupManagerResizeRequestRequest, dict]):
                The request object. A request message for
                InstanceGroupManagerResizeRequests.Delete.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            zone (str):
                The name of the zone where the
                managed instance group is located. The
                name should conform to RFC1035.

                This corresponds to the ``zone`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager (str):
                The name of the managed instance
                group. The name should conform to
                RFC1035 or be a resource ID.

                This corresponds to the ``instance_group_manager`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            resize_request (str):
                The name of the resize request to
                delete. The name should conform to
                RFC1035 or be a resource ID.

                This corresponds to the ``resize_request`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any(
            [project, zone, instance_group_manager, resize_request]
        )
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(
            request, compute.DeleteInstanceGroupManagerResizeRequestRequest
        ):
            request = compute.DeleteInstanceGroupManagerResizeRequestRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
        if project is not None:
            request.project = project
        if zone is not None:
            request.zone = zone
        if instance_group_manager is not None:
            request.instance_group_manager = instance_group_manager
        if resize_request is not None:
            request.resize_request = resize_request

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (
                    ("project", request.project),
                    ("zone", request.zone),
                    ("instance_group_manager", request.instance_group_manager),
                    ("resize_request", request.resize_request),
                )
            ),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def delete(
        self,
        request: Optional[
            Union[compute.DeleteInstanceGroupManagerResizeRequestRequest, dict]
        ] = None,
        *,
        project: Optional[str] = None,
        zone: Optional[str] = None,
        instance_group_manager: Optional[str] = None,
        resize_request: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> extended_operation.ExtendedOperation:
        r"""Deletes the specified, inactive resize request.
        Requests that are still active cannot be deleted.
        Deleting a request does not delete instances that were
        provisioned previously.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_delete():
                # Create a client
                client = compute_v1.InstanceGroupManagerResizeRequestsClient()

                # Initialize request argument(s)
                request = compute_v1.DeleteInstanceGroupManagerResizeRequestRequest(
                    instance_group_manager="instance_group_manager_value",
                    project="project_value",
                    resize_request="resize_request_value",
                    zone="zone_value",
                )

                # Make the request
                response = client.delete(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.DeleteInstanceGroupManagerResizeRequestRequest, dict]):
                The request object. A request message for
                InstanceGroupManagerResizeRequests.Delete.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            zone (str):
                The name of the zone where the
                managed instance group is located. The
                name should conform to RFC1035.

                This corresponds to the ``zone`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager (str):
                The name of the managed instance
                group. The name should conform to
                RFC1035 or be a resource ID.

                This corresponds to the ``instance_group_manager`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            resize_request (str):
                The name of the resize request to
                delete. The name should conform to
                RFC1035 or be a resource ID.

                This corresponds to the ``resize_request`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any(
            [project, zone, instance_group_manager, resize_request]
        )
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(
            request, compute.DeleteInstanceGroupManagerResizeRequestRequest
        ):
            request = compute.DeleteInstanceGroupManagerResizeRequestRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
        if project is not None:
            request.project = project
        if zone is not None:
            request.zone = zone
        if instance_group_manager is not None:
            request.instance_group_manager = instance_group_manager
        if resize_request is not None:
            request.resize_request = resize_request

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.delete]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (
                    ("project", request.project),
                    ("zone", request.zone),
                    ("instance_group_manager", request.instance_group_manager),
                    ("resize_request", request.resize_request),
                )
            ),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the bare Operation in an ExtendedOperation that polls the
        # zone operations service for completion.
        operation_service = self._transport._zone_operations_client
        operation_request = compute.GetZoneOperationRequest()
        operation_request.project = request.project
        operation_request.zone = request.zone
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response

    def get(
        self,
        request: Optional[
            Union[compute.GetInstanceGroupManagerResizeRequestRequest, dict]
        ] = None,
        *,
        project: Optional[str] = None,
        zone: Optional[str] = None,
        instance_group_manager: Optional[str] = None,
        resize_request: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> compute.InstanceGroupManagerResizeRequest:
        r"""Returns all of the details about the specified resize
        request.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_get():
                # Create a client
                client = compute_v1.InstanceGroupManagerResizeRequestsClient()

                # Initialize request argument(s)
                request = compute_v1.GetInstanceGroupManagerResizeRequestRequest(
                    instance_group_manager="instance_group_manager_value",
                    project="project_value",
                    resize_request="resize_request_value",
                    zone="zone_value",
                )

                # Make the request
                response = client.get(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.GetInstanceGroupManagerResizeRequestRequest, dict]):
                The request object. A request message for
                InstanceGroupManagerResizeRequests.Get.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            zone (str):
                Name of the zone scoping this request
                (see https://cloud.google.com/compute/docs/regions-zones/#available).
                Name should conform to RFC1035.

                This corresponds to the ``zone`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager (str):
                The name of the managed instance
                group. Name should conform to RFC1035 or
                be a resource ID.

                This corresponds to the ``instance_group_manager`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            resize_request (str):
                The name of the resize request. Name
                should conform to RFC1035 or be a
                resource ID.

                This corresponds to the ``resize_request`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.compute_v1.types.InstanceGroupManagerResizeRequest:
                InstanceGroupManagerResizeRequest
                represents a request to create a number
                of VMs: either immediately or by queuing
                the request for the specified time. This
                resize request is nested under
                InstanceGroupManager and the VMs created
                by this request are added to the owning
                InstanceGroupManager.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any(
            [project, zone, instance_group_manager, resize_request]
        )
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.GetInstanceGroupManagerResizeRequestRequest): + request = compute.GetInstanceGroupManagerResizeRequestRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if resize_request is not None: + request.resize_request = resize_request + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + ("resize_request", request.resize_request), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert_unary( + self, + request: Optional[ + Union[compute.InsertInstanceGroupManagerResizeRequestRequest, dict] + ] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + instance_group_manager_resize_request_resource: Optional[ + compute.InstanceGroupManagerResizeRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a new resize request that starts provisioning + VMs immediately or queues VM creation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.InstanceGroupManagerResizeRequestsClient() + + # Initialize request argument(s) + request = compute_v1.InsertInstanceGroupManagerResizeRequestRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertInstanceGroupManagerResizeRequestRequest, dict]): + The request object. A request message for + InstanceGroupManagerResizeRequests.Insert. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ zone (str): + The name of the zone where the + managed instance group is located and + where the resize request will be + created. Name should conform to RFC1035. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group to which the resize request will + be added. Name should conform to RFC1035 + or be a resource ID. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager_resize_request_resource (google.cloud.compute_v1.types.InstanceGroupManagerResizeRequest): + The body resource for this request + This corresponds to the ``instance_group_manager_resize_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [ + project, + zone, + instance_group_manager, + instance_group_manager_resize_request_resource, + ] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance( + request, compute.InsertInstanceGroupManagerResizeRequestRequest + ): + request = compute.InsertInstanceGroupManagerResizeRequestRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if instance_group_manager_resize_request_resource is not None: + request.instance_group_manager_resize_request_resource = ( + instance_group_manager_resize_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert( + self, + request: Optional[ + Union[compute.InsertInstanceGroupManagerResizeRequestRequest, dict] + ] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + instance_group_manager_resize_request_resource: Optional[ + compute.InstanceGroupManagerResizeRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a new resize request that starts provisioning + VMs immediately or queues VM creation. 
+ + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.InstanceGroupManagerResizeRequestsClient() + + # Initialize request argument(s) + request = compute_v1.InsertInstanceGroupManagerResizeRequestRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertInstanceGroupManagerResizeRequestRequest, dict]): + The request object. A request message for + InstanceGroupManagerResizeRequests.Insert. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + managed instance group is located and + where the resize request will be + created. Name should conform to RFC1035. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group to which the resize request will + be added. Name should conform to RFC1035 + or be a resource ID. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ instance_group_manager_resize_request_resource (google.cloud.compute_v1.types.InstanceGroupManagerResizeRequest): + The body resource for this request + This corresponds to the ``instance_group_manager_resize_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [ + project, + zone, + instance_group_manager, + instance_group_manager_resize_request_resource, + ] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, compute.InsertInstanceGroupManagerResizeRequestRequest + ): + request = compute.InsertInstanceGroupManagerResizeRequestRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if instance_group_manager_resize_request_resource is not None: + request.instance_group_manager_resize_request_resource = ( + instance_group_manager_resize_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list( + self, + request: Optional[ + Union[compute.ListInstanceGroupManagerResizeRequestsRequest, dict] + ] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instance_group_manager: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of resize requests that are + contained in the managed instance group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.InstanceGroupManagerResizeRequestsClient() + + # Initialize request argument(s) + request = compute_v1.ListInstanceGroupManagerResizeRequestsRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListInstanceGroupManagerResizeRequestsRequest, dict]): + The request object. 
A request message for + InstanceGroupManagerResizeRequests.List. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone where the + managed instance group is located. The + name should conform to RFC1035. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. The name should conform to + RFC1035. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.instance_group_manager_resize_requests.pagers.ListPager: + [Output Only] A list of resize requests. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, instance_group_manager]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance( + request, compute.ListInstanceGroupManagerResizeRequestsRequest + ): + request = compute.ListInstanceGroupManagerResizeRequestsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("instance_group_manager", request.instance_group_manager), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "InstanceGroupManagerResizeRequestsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("InstanceGroupManagerResizeRequestsClient",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/pagers.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/pagers.py new file mode 100644 index 000000000000..e6f47967b27f --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/pagers.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.InstanceGroupManagerResizeRequestsListResponse` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.compute_v1.types.InstanceGroupManagerResizeRequestsListResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., compute.InstanceGroupManagerResizeRequestsListResponse], + request: compute.ListInstanceGroupManagerResizeRequestsRequest, + response: compute.InstanceGroupManagerResizeRequestsListResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListInstanceGroupManagerResizeRequestsRequest): + The initial request object. + response (google.cloud.compute_v1.types.InstanceGroupManagerResizeRequestsListResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.ListInstanceGroupManagerResizeRequestsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.InstanceGroupManagerResizeRequestsListResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.InstanceGroupManagerResizeRequest]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/transports/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/transports/__init__.py new file mode 100644 index 000000000000..bc0501b5fca0 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/transports/__init__.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import InstanceGroupManagerResizeRequestsTransport +from .rest import ( + InstanceGroupManagerResizeRequestsRestInterceptor, + InstanceGroupManagerResizeRequestsRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[InstanceGroupManagerResizeRequestsTransport]] +_transport_registry["rest"] = InstanceGroupManagerResizeRequestsRestTransport + +__all__ = ( + "InstanceGroupManagerResizeRequestsTransport", + "InstanceGroupManagerResizeRequestsRestTransport", + "InstanceGroupManagerResizeRequestsRestInterceptor", +) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/transports/base.py new file mode 100644 index 000000000000..d755d91c4440 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/transports/base.py @@ -0,0 +1,238 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1 import gapic_version as package_version +from google.cloud.compute_v1.services import zone_operations +from google.cloud.compute_v1.types import compute + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class InstanceGroupManagerResizeRequestsTransport(abc.ABC): + """Abstract transport class for InstanceGroupManagerResizeRequests.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ) + + DEFAULT_HOST: str = "compute.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.cancel: gapic_v1.method.wrap_method( + self.cancel, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def cancel( + self, + ) -> Callable[ + [compute.CancelInstanceGroupManagerResizeRequestRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def delete( + self, + ) -> Callable[ + [compute.DeleteInstanceGroupManagerResizeRequestRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def get( + self, + ) -> Callable[ + [compute.GetInstanceGroupManagerResizeRequestRequest], + Union[ + compute.InstanceGroupManagerResizeRequest, + Awaitable[compute.InstanceGroupManagerResizeRequest], + ], + ]: + raise NotImplementedError() + + @property + def insert( + self, + ) -> Callable[ + [compute.InsertInstanceGroupManagerResizeRequestRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def list( + self, + ) -> Callable[ + [compute.ListInstanceGroupManagerResizeRequestsRequest], + Union[ + compute.InstanceGroupManagerResizeRequestsListResponse, + Awaitable[compute.InstanceGroupManagerResizeRequestsListResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _zone_operations_client(self) -> zone_operations.ZoneOperationsClient: + ex_op_service = self._extended_operations_services.get("zone_operations") + if not ex_op_service: + ex_op_service = zone_operations.ZoneOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["zone_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ("InstanceGroupManagerResizeRequestsTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/transports/rest.py new file 
mode 100644 index 000000000000..08e09796696b --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_manager_resize_requests/transports/rest.py @@ -0,0 +1,901 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import InstanceGroupManagerResizeRequestsTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, 
+ grpc_version=None, + rest_version=requests_version, +) + + +class InstanceGroupManagerResizeRequestsRestInterceptor: + """Interceptor for InstanceGroupManagerResizeRequests. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the InstanceGroupManagerResizeRequestsRestTransport. + + .. code-block:: python + class MyCustomInstanceGroupManagerResizeRequestsInterceptor(InstanceGroupManagerResizeRequestsRestInterceptor): + def pre_cancel(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_cancel(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + transport = InstanceGroupManagerResizeRequestsRestTransport(interceptor=MyCustomInstanceGroupManagerResizeRequestsInterceptor()) + client = 
InstanceGroupManagerResizeRequestsClient(transport=transport) + + + """ + + def pre_cancel( + self, + request: compute.CancelInstanceGroupManagerResizeRequestRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + compute.CancelInstanceGroupManagerResizeRequestRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for cancel + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagerResizeRequests server. + """ + return request, metadata + + def post_cancel(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for cancel + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagerResizeRequests server but before + it is returned to user code. + """ + return response + + def pre_delete( + self, + request: compute.DeleteInstanceGroupManagerResizeRequestRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + compute.DeleteInstanceGroupManagerResizeRequestRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagerResizeRequests server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagerResizeRequests server but before + it is returned to user code. + """ + return response + + def pre_get( + self, + request: compute.GetInstanceGroupManagerResizeRequestRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + compute.GetInstanceGroupManagerResizeRequestRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagerResizeRequests server. 
+ """ + return request, metadata + + def post_get( + self, response: compute.InstanceGroupManagerResizeRequest + ) -> compute.InstanceGroupManagerResizeRequest: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagerResizeRequests server but before + it is returned to user code. + """ + return response + + def pre_insert( + self, + request: compute.InsertInstanceGroupManagerResizeRequestRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + compute.InsertInstanceGroupManagerResizeRequestRequest, + Sequence[Tuple[str, str]], + ]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagerResizeRequests server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagerResizeRequests server but before + it is returned to user code. + """ + return response + + def pre_list( + self, + request: compute.ListInstanceGroupManagerResizeRequestsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + compute.ListInstanceGroupManagerResizeRequestsRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceGroupManagerResizeRequests server. + """ + return request, metadata + + def post_list( + self, response: compute.InstanceGroupManagerResizeRequestsListResponse + ) -> compute.InstanceGroupManagerResizeRequestsListResponse: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the InstanceGroupManagerResizeRequests server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class InstanceGroupManagerResizeRequestsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: InstanceGroupManagerResizeRequestsRestInterceptor + + +class InstanceGroupManagerResizeRequestsRestTransport( + InstanceGroupManagerResizeRequestsTransport +): + """REST backend transport for InstanceGroupManagerResizeRequests. + + The InstanceGroupManagerResizeRequests API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__( + self, + *, + host: str = "compute.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[InstanceGroupManagerResizeRequestsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+    # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = ( + interceptor or InstanceGroupManagerResizeRequestsRestInterceptor() + ) + self._prep_wrapped_messages(client_info) + + class _Cancel(InstanceGroupManagerResizeRequestsRestStub): + def __hash__(self): + return hash("Cancel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.CancelInstanceGroupManagerResizeRequestRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Call the cancel method over HTTP. + + Args: + request (~.compute.CancelInstanceGroupManagerResizeRequestRequest): + The request object. A request message for + InstanceGroupManagerResizeRequests.Cancel. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zoneOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + Note that completed Operation resources have a limited + retention period. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/resizeRequests/{resize_request}/cancel", + }, + ] + request, metadata = self._interceptor.pre_cancel(request, metadata) + pb_request = compute.CancelInstanceGroupManagerResizeRequestRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_cancel(resp) + return resp + + class _Delete(InstanceGroupManagerResizeRequestsRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.DeleteInstanceGroupManagerResizeRequestRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteInstanceGroupManagerResizeRequestRequest): + The request object. A request message for + InstanceGroupManagerResizeRequests.Delete. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zoneOperations`` resource. 
For more + information, read Global, Regional, and Zonal Resources. + Note that completed Operation resources have a limited + retention period. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/resizeRequests/{resize_request}", + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteInstanceGroupManagerResizeRequestRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(InstanceGroupManagerResizeRequestsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.GetInstanceGroupManagerResizeRequestRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.InstanceGroupManagerResizeRequest: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetInstanceGroupManagerResizeRequestRequest): + The request object. A request message for + InstanceGroupManagerResizeRequests.Get. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.InstanceGroupManagerResizeRequest: + InstanceGroupManagerResizeRequest + represents a request to create a number + of VMs: either immediately or by queuing + the request for the specified time. This + resize request is nested under + InstanceGroupManager and the VMs created + by this request are added to the owning + InstanceGroupManager. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/resizeRequests/{resize_request}", + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetInstanceGroupManagerResizeRequestRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstanceGroupManagerResizeRequest() + pb_resp = compute.InstanceGroupManagerResizeRequest.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Insert(InstanceGroupManagerResizeRequestsRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.InsertInstanceGroupManagerResizeRequestRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertInstanceGroupManagerResizeRequestRequest): + The request object. A request message for + InstanceGroupManagerResizeRequests.Insert. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. 
- For zonal operations, + use the ``zoneOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + Note that completed Operation resources have a limited + retention period. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/resizeRequests", + "body": "instance_group_manager_resize_request_resource", + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertInstanceGroupManagerResizeRequestRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(InstanceGroupManagerResizeRequestsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.ListInstanceGroupManagerResizeRequestsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.InstanceGroupManagerResizeRequestsListResponse: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListInstanceGroupManagerResizeRequestsRequest): + The request object. A request message for + InstanceGroupManagerResizeRequests.List. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.InstanceGroupManagerResizeRequestsListResponse: + [Output Only] A list of resize requests. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/resizeRequests", + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListInstanceGroupManagerResizeRequestsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstanceGroupManagerResizeRequestsListResponse() + pb_resp = compute.InstanceGroupManagerResizeRequestsListResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + @property + def cancel( + self, + ) -> Callable[ + [compute.CancelInstanceGroupManagerResizeRequestRequest], compute.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Cancel(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete( + self, + ) -> Callable[ + [compute.DeleteInstanceGroupManagerResizeRequestRequest], compute.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get( + self, + ) -> Callable[ + [compute.GetInstanceGroupManagerResizeRequestRequest], + compute.InstanceGroupManagerResizeRequest, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert( + self, + ) -> Callable[ + [compute.InsertInstanceGroupManagerResizeRequestRequest], compute.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list( + self, + ) -> Callable[ + [compute.ListInstanceGroupManagerResizeRequestsRequest], + compute.InstanceGroupManagerResizeRequestsListResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("InstanceGroupManagerResizeRequestsRestTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py index 0bde44f7b8b9..6f3565388942 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_group_managers/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -508,7 +509,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, InstanceGroupManagersTransport]] = None, + transport: Optional[ + Union[ + str, + InstanceGroupManagersTransport, + Callable[..., InstanceGroupManagersTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -520,9 +527,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, InstanceGroupManagersTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,InstanceGroupManagersTransport,Callable[..., InstanceGroupManagersTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the InstanceGroupManagersTransport constructor. 
+ If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -634,8 +643,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[InstanceGroupManagersTransport], + Callable[..., InstanceGroupManagersTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., InstanceGroupManagersTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -750,8 +767,8 @@ def sample_abandon_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -766,10 +783,8 @@ def sample_abandon_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AbandonInstancesInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.AbandonInstancesInstanceGroupManagerRequest): request = compute.AbandonInstancesInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -918,8 +933,8 @@ def sample_abandon_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -934,10 +949,8 @@ def sample_abandon_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AbandonInstancesInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AbandonInstancesInstanceGroupManagerRequest): request = compute.AbandonInstancesInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1074,8 +1087,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1083,10 +1096,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListInstanceGroupManagersRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListInstanceGroupManagersRequest): request = compute.AggregatedListInstanceGroupManagersRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1218,8 +1229,8 @@ def sample_apply_updates_to_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1234,10 +1245,8 @@ def sample_apply_updates_to_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest ): @@ -1380,8 +1389,8 @@ def sample_apply_updates_to_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1396,10 +1405,8 @@ def sample_apply_updates_to_instances(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest ): @@ -1572,8 +1579,8 @@ def sample_create_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1588,10 +1595,8 @@ def sample_create_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.CreateInstancesInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.CreateInstancesInstanceGroupManagerRequest): request = compute.CreateInstancesInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1733,8 +1738,8 @@ def sample_create_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [ project, @@ -1749,10 +1754,8 @@ def sample_create_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.CreateInstancesInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.CreateInstancesInstanceGroupManagerRequest): request = compute.CreateInstancesInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1906,8 +1909,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance_group_manager]) if request is not None and has_flattened_params: raise ValueError( @@ -1915,10 +1918,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteInstanceGroupManagerRequest): request = compute.DeleteInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2043,8 +2044,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance_group_manager]) if request is not None and has_flattened_params: raise ValueError( @@ -2052,10 +2053,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteInstanceGroupManagerRequest): request = compute.DeleteInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2224,8 +2223,8 @@ def sample_delete_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2240,10 +2239,8 @@ def sample_delete_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteInstancesInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteInstancesInstanceGroupManagerRequest): request = compute.DeleteInstancesInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2391,8 +2388,8 @@ def sample_delete_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2407,10 +2404,8 @@ def sample_delete_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteInstancesInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteInstancesInstanceGroupManagerRequest): request = compute.DeleteInstancesInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2571,8 +2566,8 @@ def sample_delete_per_instance_configs(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2587,10 +2582,8 @@ def sample_delete_per_instance_configs(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeletePerInstanceConfigsInstanceGroupManagerRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.DeletePerInstanceConfigsInstanceGroupManagerRequest ): @@ -2735,8 +2728,8 @@ def sample_delete_per_instance_configs(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2751,10 +2744,8 @@ def sample_delete_per_instance_configs(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeletePerInstanceConfigsInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.DeletePerInstanceConfigsInstanceGroupManagerRequest ): @@ -2920,8 +2911,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance_group_manager]) if request is not None and has_flattened_params: raise ValueError( @@ -2929,10 +2920,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetInstanceGroupManagerRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetInstanceGroupManagerRequest): request = compute.GetInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3061,8 +3050,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance_group_manager_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -3070,10 +3059,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertInstanceGroupManagerRequest): request = compute.InsertInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3203,8 +3190,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, zone, instance_group_manager_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -3212,10 +3199,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertInstanceGroupManagerRequest): request = compute.InsertInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3356,8 +3341,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone]) if request is not None and has_flattened_params: raise ValueError( @@ -3365,10 +3350,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListInstanceGroupManagersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListInstanceGroupManagersRequest): request = compute.ListInstanceGroupManagersRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3503,8 +3486,8 @@ def sample_list_errors(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance_group_manager]) if request is not None and has_flattened_params: raise ValueError( @@ -3512,10 +3495,8 @@ def sample_list_errors(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListErrorsInstanceGroupManagersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListErrorsInstanceGroupManagersRequest): request = compute.ListErrorsInstanceGroupManagersRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3656,8 +3637,8 @@ def sample_list_managed_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance_group_manager]) if request is not None and has_flattened_params: raise ValueError( @@ -3665,10 +3646,8 @@ def sample_list_managed_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListManagedInstancesInstanceGroupManagersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.ListManagedInstancesInstanceGroupManagersRequest ): @@ -3806,8 +3785,8 @@ def sample_list_per_instance_configs(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance_group_manager]) if request is not None and has_flattened_params: raise ValueError( @@ -3815,10 +3794,8 @@ def sample_list_per_instance_configs(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListPerInstanceConfigsInstanceGroupManagersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.ListPerInstanceConfigsInstanceGroupManagersRequest ): @@ -3972,8 +3949,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance_group_manager, instance_group_manager_resource] ) @@ -3983,10 +3960,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchInstanceGroupManagerRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchInstanceGroupManagerRequest): request = compute.PatchInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4129,8 +4104,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance_group_manager, instance_group_manager_resource] ) @@ -4140,10 +4115,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchInstanceGroupManagerRequest): request = compute.PatchInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4306,8 +4279,8 @@ def sample_patch_per_instance_configs(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [ project, @@ -4322,10 +4295,8 @@ def sample_patch_per_instance_configs(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchPerInstanceConfigsInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.PatchPerInstanceConfigsInstanceGroupManagerRequest ): @@ -4472,8 +4443,8 @@ def sample_patch_per_instance_configs(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -4488,10 +4459,8 @@ def sample_patch_per_instance_configs(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchPerInstanceConfigsInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.PatchPerInstanceConfigsInstanceGroupManagerRequest ): @@ -4672,8 +4641,8 @@ def sample_recreate_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [ project, @@ -4688,10 +4657,8 @@ def sample_recreate_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RecreateInstancesInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.RecreateInstancesInstanceGroupManagerRequest ): @@ -4840,8 +4807,8 @@ def sample_recreate_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -4856,10 +4823,8 @@ def sample_recreate_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RecreateInstancesInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.RecreateInstancesInstanceGroupManagerRequest ): @@ -5044,8 +5009,8 @@ def sample_resize(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, zone, instance_group_manager, size]) if request is not None and has_flattened_params: raise ValueError( @@ -5053,10 +5018,8 @@ def sample_resize(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ResizeInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ResizeInstanceGroupManagerRequest): request = compute.ResizeInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -5212,8 +5175,8 @@ def sample_resize(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance_group_manager, size]) if request is not None and has_flattened_params: raise ValueError( @@ -5221,10 +5184,8 @@ def sample_resize(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ResizeInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ResizeInstanceGroupManagerRequest): request = compute.ResizeInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -5385,8 +5346,8 @@ def sample_set_instance_template(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -5401,10 +5362,8 @@ def sample_set_instance_template(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetInstanceTemplateInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.SetInstanceTemplateInstanceGroupManagerRequest ): @@ -5547,8 +5506,8 @@ def sample_set_instance_template(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -5563,10 +5522,8 @@ def sample_set_instance_template(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetInstanceTemplateInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.SetInstanceTemplateInstanceGroupManagerRequest ): @@ -5737,8 +5694,8 @@ def sample_set_target_pools(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -5753,10 +5710,8 @@ def sample_set_target_pools(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetTargetPoolsInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetTargetPoolsInstanceGroupManagerRequest): request = compute.SetTargetPoolsInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -5897,8 +5852,8 @@ def sample_set_target_pools(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -5913,10 +5868,8 @@ def sample_set_target_pools(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetTargetPoolsInstanceGroupManagerRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetTargetPoolsInstanceGroupManagerRequest): request = compute.SetTargetPoolsInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -6079,8 +6032,8 @@ def sample_update_per_instance_configs(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -6095,10 +6048,8 @@ def sample_update_per_instance_configs(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest ): @@ -6245,8 +6196,8 @@ def sample_update_per_instance_configs(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -6261,10 +6212,8 @@ def sample_update_per_instance_configs(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest ): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/client.py index 9d2f177d1e2c..f3c334045047 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_groups/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, InstanceGroupsTransport]] = None, + transport: Optional[ + Union[str, InstanceGroupsTransport, Callable[..., InstanceGroupsTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +521,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, InstanceGroupsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,InstanceGroupsTransport,Callable[..., InstanceGroupsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. 
+ If a Callable is given, it will be called with the same set of initialization + arguments as used in the InstanceGroupsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -632,8 +637,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[InstanceGroupsTransport], Callable[..., InstanceGroupsTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., InstanceGroupsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -734,8 +746,8 @@ def sample_add_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -750,10 +762,8 @@ def sample_add_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddInstancesInstanceGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.AddInstancesInstanceGroupRequest): request = compute.AddInstancesInstanceGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -888,8 +898,8 @@ def sample_add_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -904,10 +914,8 @@ def sample_add_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddInstancesInstanceGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddInstancesInstanceGroupRequest): request = compute.AddInstancesInstanceGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1044,8 +1052,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1053,10 +1061,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListInstanceGroupsRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListInstanceGroupsRequest): request = compute.AggregatedListInstanceGroupsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1178,8 +1184,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance_group]) if request is not None and has_flattened_params: raise ValueError( @@ -1187,10 +1193,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteInstanceGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteInstanceGroupRequest): request = compute.DeleteInstanceGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1313,8 +1317,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, zone, instance_group]) if request is not None and has_flattened_params: raise ValueError( @@ -1322,10 +1326,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteInstanceGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteInstanceGroupRequest): request = compute.DeleteInstanceGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1484,8 +1486,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance_group]) if request is not None and has_flattened_params: raise ValueError( @@ -1493,10 +1495,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetInstanceGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetInstanceGroupRequest): request = compute.GetInstanceGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1614,8 +1614,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance_group_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1623,10 +1623,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertInstanceGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertInstanceGroupRequest): request = compute.InsertInstanceGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1743,8 +1741,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance_group_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1752,10 +1750,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertInstanceGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.InsertInstanceGroupRequest): request = compute.InsertInstanceGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1897,8 +1893,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone]) if request is not None and has_flattened_params: raise ValueError( @@ -1906,10 +1902,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListInstanceGroupsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListInstanceGroupsRequest): request = compute.ListInstanceGroupsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2051,8 +2045,8 @@ def sample_list_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2067,10 +2061,8 @@ def sample_list_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListInstancesInstanceGroupsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListInstancesInstanceGroupsRequest): request = compute.ListInstancesInstanceGroupsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2218,8 +2210,8 @@ def sample_remove_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2234,10 +2226,8 @@ def sample_remove_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveInstancesInstanceGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.RemoveInstancesInstanceGroupRequest): request = compute.RemoveInstancesInstanceGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2376,8 +2366,8 @@ def sample_remove_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2392,10 +2382,8 @@ def sample_remove_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveInstancesInstanceGroupRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.RemoveInstancesInstanceGroupRequest): request = compute.RemoveInstancesInstanceGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2555,8 +2543,8 @@ def sample_set_named_ports(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2571,10 +2559,8 @@ def sample_set_named_ports(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetNamedPortsInstanceGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetNamedPortsInstanceGroupRequest): request = compute.SetNamedPortsInstanceGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2709,8 +2695,8 @@ def sample_set_named_ports(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2725,10 +2711,8 @@ def sample_set_named_ports(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetNamedPortsInstanceGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetNamedPortsInstanceGroupRequest): request = compute.SetNamedPortsInstanceGroupRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/__init__.py new file mode 100644 index 000000000000..e0c63b6384c9 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import InstanceSettingsServiceClient + +__all__ = ("InstanceSettingsServiceClient",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/client.py new file mode 100644 index 000000000000..20a54c242f81 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/client.py @@ -0,0 +1,1085 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation, gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import extended_operation # type: ignore + +from google.cloud.compute_v1.types import compute + +from .transports.base import DEFAULT_CLIENT_INFO, InstanceSettingsServiceTransport +from .transports.rest import InstanceSettingsServiceRestTransport + + +class InstanceSettingsServiceClientMeta(type): + """Metaclass for the InstanceSettingsService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[InstanceSettingsServiceTransport]] + _transport_registry["rest"] = InstanceSettingsServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[InstanceSettingsServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class InstanceSettingsServiceClient(metaclass=InstanceSettingsServiceClientMeta): + """The InstanceSettings API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "compute.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. 
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + InstanceSettingsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InstanceSettingsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> InstanceSettingsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + InstanceSettingsServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + 
) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". 
@staticmethod
def _get_api_endpoint(
    api_override, client_cert_source, universe_domain, use_mtls_endpoint
):
    """Return the API endpoint the client should use.

    Args:
        api_override (str): Explicit endpoint override. When not None it is
            returned verbatim and every other argument is ignored.
        client_cert_source (bytes): The client certificate source in use.
        universe_domain (str): The universe domain in use.
        use_mtls_endpoint (str): mTLS endpoint policy: "always", "auto", or "never".

    Returns:
        str: The API endpoint to be used by the client.
    """
    if api_override is not None:
        return api_override

    wants_mtls = use_mtls_endpoint == "always" or (
        use_mtls_endpoint == "auto" and client_cert_source
    )
    if wants_mtls:
        _default_universe = InstanceSettingsServiceClient._DEFAULT_UNIVERSE
        # mTLS is only available in the default universe.
        if universe_domain != _default_universe:
            raise MutualTLSChannelError(
                f"mTLS is not supported in any universe other than {_default_universe}."
            )
        return InstanceSettingsServiceClient.DEFAULT_MTLS_ENDPOINT

    return InstanceSettingsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(
        UNIVERSE_DOMAIN=universe_domain
    )

@staticmethod
def _get_universe_domain(
    client_universe_domain: Optional[str], universe_domain_env: Optional[str]
) -> str:
    """Resolve the universe domain: client option, then env var, then default.

    Args:
        client_universe_domain (Optional[str]): Universe domain from client options.
        universe_domain_env (Optional[str]): Universe domain from the
            "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.

    Returns:
        str: The universe domain to be used by the client.

    Raises:
        ValueError: If the universe domain is an empty string.
    """
    if client_universe_domain is not None:
        chosen = client_universe_domain
    elif universe_domain_env is not None:
        chosen = universe_domain_env
    else:
        chosen = InstanceSettingsServiceClient._DEFAULT_UNIVERSE
    if len(chosen.strip()) == 0:
        raise ValueError("Universe Domain cannot be an empty string.")
    return chosen

@staticmethod
def _compare_universes(
    client_universe: str, credentials: ga_credentials.Credentials
) -> bool:
    """Returns True iff the universe domains used by the client and credentials match.

    Args:
        client_universe (str): The universe domain configured via the client options.
        credentials (ga_credentials.Credentials): The credentials being used in the client.

    Returns:
        bool: True iff client_universe matches the universe in credentials.

    Raises:
        ValueError: when client_universe does not match the universe in credentials.
    """
    default_universe = InstanceSettingsServiceClient._DEFAULT_UNIVERSE
    # Credentials that predate universe-domain support default to the
    # standard universe.
    credentials_universe = getattr(credentials, "universe_domain", default_universe)

    if client_universe != credentials_universe:
        raise ValueError(
            f"The configured universe domain ({client_universe}) does not match "
            f"the universe domain found in the credentials ({credentials_universe}). "
            "If you haven't configured the universe domain explicitly, "
            f"`{default_universe}` is the default."
        )
    return True

def _validate_universe_domain(self):
    """Validates client's and credentials' universe domains are consistent.

    Returns:
        bool: True iff the configured universe domain is valid.

    Raises:
        ValueError: If the configured universe domain is not valid.
    """
    if not self._is_universe_domain_valid:
        # _compare_universes either returns True or raises, so caching the
        # result once is safe.
        self._is_universe_domain_valid = (
            InstanceSettingsServiceClient._compare_universes(
                self.universe_domain, self.transport._credentials
            )
        )
    return self._is_universe_domain_valid

@property
def api_endpoint(self):
    """str: The API endpoint used by the client instance."""
    return self._api_endpoint

@property
def universe_domain(self) -> str:
    """str: The universe domain used by the client instance."""
    return self._universe_domain
+ """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + InstanceSettingsServiceTransport, + Callable[..., InstanceSettingsServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the instance settings service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,InstanceSettingsServiceTransport,Callable[..., InstanceSettingsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the InstanceSettingsServiceTransport constructor. + If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = InstanceSettingsServiceClient._read_environment_variables() + self._client_cert_source = ( + InstanceSettingsServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = InstanceSettingsServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, InstanceSettingsServiceTransport) + if transport_provided: + # transport is a InstanceSettingsServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
def get(
    self,
    request: Optional[Union[compute.GetInstanceSettingRequest, dict]] = None,
    *,
    project: Optional[str] = None,
    zone: Optional[str] = None,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: Union[float, object] = gapic_v1.method.DEFAULT,
    metadata: Sequence[Tuple[str, str]] = (),
) -> compute.InstanceSettings:
    r"""Get Instance settings.

    Args:
        request (Union[google.cloud.compute_v1.types.GetInstanceSettingRequest, dict]):
            The request object. A request message for
            InstanceSettingsService.Get. Mutually exclusive with the
            flattened ``project`` / ``zone`` arguments.
        project (str):
            Project ID for this request. Corresponds to the ``project``
            field on ``request``; must not be set together with ``request``.
        zone (str):
            Name of the zone for this request. Corresponds to the ``zone``
            field on ``request``; must not be set together with ``request``.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be
            sent along with the request as metadata.

    Returns:
        google.cloud.compute_v1.types.InstanceSettings:
            Represents a Instance Settings resource, used to configure
            default settings (e.g. default machine type) for Compute Engine
            VM instances.
    """
    # A request object and flattened field arguments are mutually exclusive.
    if request is not None and any([project, zone]):
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )

    # Use the provided request object (no flattened fields to clobber),
    # or build one from the keyword arguments.
    if not isinstance(request, compute.GetInstanceSettingRequest):
        request = compute.GetInstanceSettingRequest(request)
    for field_name, field_value in (("project", project), ("zone", zone)):
        if field_value is not None:
            setattr(request, field_name, field_value)

    # The wrapped method adds retry, timeout, and friendly error handling.
    rpc = self._transport._wrapped_methods[self._transport.get]

    # Routing fields travel in the metadata header.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata(
            (
                ("project", request.project),
                ("zone", request.zone),
            )
        ),
    )

    # Validate the universe domain.
    self._validate_universe_domain()

    # Send the request and return the response.
    return rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )
def patch_unary(
    self,
    request: Optional[Union[compute.PatchInstanceSettingRequest, dict]] = None,
    *,
    project: Optional[str] = None,
    zone: Optional[str] = None,
    instance_settings_resource: Optional[compute.InstanceSettings] = None,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: Union[float, object] = gapic_v1.method.DEFAULT,
    metadata: Sequence[Tuple[str, str]] = (),
) -> compute.Operation:
    r"""Patch Instance settings

    Returns the raw operation without polling; see :meth:`patch` for the
    extended-operation variant.

    Args:
        request (Union[google.cloud.compute_v1.types.PatchInstanceSettingRequest, dict]):
            The request object. A request message for
            InstanceSettingsService.Patch. Mutually exclusive with the
            flattened field arguments below.
        project (str):
            Project ID for this request. Corresponds to the ``project``
            field on ``request``; must not be set together with ``request``.
        zone (str):
            The zone scoping this request. It should conform to RFC1035.
            Corresponds to the ``zone`` field on ``request``; must not be
            set together with ``request``.
        instance_settings_resource (google.cloud.compute_v1.types.InstanceSettings):
            The body resource for this request. Corresponds to the
            ``instance_settings_resource`` field on ``request``; must not
            be set together with ``request``.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be
            sent along with the request as metadata.

    Returns:
        google.api_core.extended_operation.ExtendedOperation:
            An object representing a extended long-running operation.
    """
    # A request object and flattened field arguments are mutually exclusive.
    if request is not None and any([project, zone, instance_settings_resource]):
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )

    # Use the provided request object, or build one from the keyword args.
    if not isinstance(request, compute.PatchInstanceSettingRequest):
        request = compute.PatchInstanceSettingRequest(request)
    if project is not None:
        request.project = project
    if zone is not None:
        request.zone = zone
    if instance_settings_resource is not None:
        request.instance_settings_resource = instance_settings_resource

    # The wrapped method adds retry, timeout, and friendly error handling.
    rpc = self._transport._wrapped_methods[self._transport.patch]

    # Routing fields travel in the metadata header.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata(
            (
                ("project", request.project),
                ("zone", request.zone),
            )
        ),
    )

    # Validate the universe domain.
    self._validate_universe_domain()

    # Send the request and return the response.
    return rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )
def patch(
    self,
    request: Optional[Union[compute.PatchInstanceSettingRequest, dict]] = None,
    *,
    project: Optional[str] = None,
    zone: Optional[str] = None,
    instance_settings_resource: Optional[compute.InstanceSettings] = None,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: Union[float, object] = gapic_v1.method.DEFAULT,
    metadata: Sequence[Tuple[str, str]] = (),
) -> extended_operation.ExtendedOperation:
    r"""Patch Instance settings

    Args:
        request (Union[google.cloud.compute_v1.types.PatchInstanceSettingRequest, dict]):
            The request object. A request message for
            InstanceSettingsService.Patch. Mutually exclusive with the
            flattened field arguments below.
        project (str):
            Project ID for this request. Corresponds to the ``project``
            field on ``request``; must not be set together with ``request``.
        zone (str):
            The zone scoping this request. It should conform to RFC1035.
            Corresponds to the ``zone`` field on ``request``; must not be
            set together with ``request``.
        instance_settings_resource (google.cloud.compute_v1.types.InstanceSettings):
            The body resource for this request. Corresponds to the
            ``instance_settings_resource`` field on ``request``; must not
            be set together with ``request``.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, str]]): Strings which should be
            sent along with the request as metadata.

    Returns:
        google.api_core.extended_operation.ExtendedOperation:
            An object representing a extended long-running operation.
    """
    # A request object and flattened field arguments are mutually exclusive.
    if request is not None and any([project, zone, instance_settings_resource]):
        raise ValueError(
            "If the `request` argument is set, then none of "
            "the individual field arguments should be set."
        )

    # Use the provided request object, or build one from the keyword args.
    if not isinstance(request, compute.PatchInstanceSettingRequest):
        request = compute.PatchInstanceSettingRequest(request)
    if project is not None:
        request.project = project
    if zone is not None:
        request.zone = zone
    if instance_settings_resource is not None:
        request.instance_settings_resource = instance_settings_resource

    # The wrapped method adds retry, timeout, and friendly error handling.
    rpc = self._transport._wrapped_methods[self._transport.patch]

    # Routing fields travel in the metadata header.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata(
            (
                ("project", request.project),
                ("zone", request.zone),
            )
        ),
    )

    # Validate the universe domain.
    self._validate_universe_domain()

    # Send the request.
    response = rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Wrap the returned Operation so callers can poll it via the
    # zone operations service.
    operation_service = self._transport._zone_operations_client
    operation_request = compute.GetZoneOperationRequest()
    operation_request.project = request.project
    operation_request.zone = request.zone
    operation_request.operation = response.name

    get_operation = functools.partial(operation_service.get, operation_request)

    def cancel_operation():
        # Cancel is not part of extended operations yet.
        return None

    # Implementation detail: presents uniform names for certain fields of the
    # extended operation proto message. See
    # google.api_core.extended_operation.ExtendedOperation for the expected
    # interface.
    class _CustomOperation(extended_operation.ExtendedOperation):
        @property
        def error_message(self):
            return self._extended_operation.http_error_message

        @property
        def error_code(self):
            return self._extended_operation.http_error_status_code

    # Done; return the wrapped response.
    return _CustomOperation.make(get_operation, cancel_operation, response)

def __enter__(self) -> "InstanceSettingsServiceClient":
    return self

def __exit__(self, type, value, traceback):
    """Releases underlying transport's resources.

    .. warning::
        ONLY use as a context manager if the transport is NOT shared
        with other clients! Exiting the with block will CLOSE the transport
        and may cause errors in other clients!
    """
    self.transport.close()
+ """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("InstanceSettingsServiceClient",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/transports/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/transports/__init__.py new file mode 100644 index 000000000000..7451cfb86ff0 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/transports/__init__.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import InstanceSettingsServiceTransport +from .rest import ( + InstanceSettingsServiceRestInterceptor, + InstanceSettingsServiceRestTransport, +) + +# Compile a registry of transports. 
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[InstanceSettingsServiceTransport]] +_transport_registry["rest"] = InstanceSettingsServiceRestTransport + +__all__ = ( + "InstanceSettingsServiceTransport", + "InstanceSettingsServiceRestTransport", + "InstanceSettingsServiceRestInterceptor", +) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/transports/base.py new file mode 100644 index 000000000000..a1ba166f0c68 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/transports/base.py @@ -0,0 +1,190 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
import abc
# `Any` added: it was missing even though `Dict[str, Any]` is used below for
# the extended-operations service cache.
from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union

import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
import google.auth  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.compute_v1 import gapic_version as package_version
from google.cloud.compute_v1.services import zone_operations
from google.cloud.compute_v1.types import compute

DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=package_version.__version__
)


class InstanceSettingsServiceTransport(abc.ABC):
    """Abstract transport class for InstanceSettingsService."""

    AUTH_SCOPES = (
        "https://www.googleapis.com/auth/compute",
        "https://www.googleapis.com/auth/cloud-platform",
    )

    DEFAULT_HOST: str = "compute.googleapis.com"

    def __init__(
        self,
        *,
        host: str = DEFAULT_HOST,
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
        **kwargs,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to (default: 'compute.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            api_audience (Optional[str]): The audience to apply to GDC-H
                credentials, when supported; defaults to ``host``.
        """
        # Cache of lazily-created extended-operation service clients
        # (see _zone_operations_client below).
        self._extended_operations_services: Dict[str, Any] = {}

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs(
                "'credentials_file' and 'credentials' are mutually exclusive"
            )

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
            )
        elif credentials is None:
            credentials, _ = google.auth.default(
                **scopes_kwargs, quota_project_id=quota_project_id
            )
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(
                    api_audience if api_audience else host
                )

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if (
            always_use_jwt_access
            and isinstance(credentials, service_account.Credentials)
            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
        ):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        # NOTE(review): this check assumes a plain hostname — a bare IPv6
        # literal containing ":" would bypass the default port; confirm if
        # such hosts are expected.
        if ":" not in host:
            host += ":443"
        self._host = host

    @property
    def host(self):
        """str: The host (including port) this transport connects to."""
        return self._host

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods.
        self._wrapped_methods = {
            self.get: gapic_v1.method.wrap_method(
                self.get,
                default_timeout=None,
                client_info=client_info,
            ),
            self.patch: gapic_v1.method.wrap_method(
                self.patch,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
             Only call this method if the transport is NOT shared
             with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def get(
        self,
    ) -> Callable[
        [compute.GetInstanceSettingRequest],
        Union[compute.InstanceSettings, Awaitable[compute.InstanceSettings]],
    ]:
        raise NotImplementedError()

    @property
    def patch(
        self,
    ) -> Callable[
        [compute.PatchInstanceSettingRequest],
        Union[compute.Operation, Awaitable[compute.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        raise NotImplementedError()

    @property
    def _zone_operations_client(self) -> zone_operations.ZoneOperationsClient:
        # Lazily create (and cache) a ZoneOperationsClient sharing this
        # transport's credentials, used to poll extended operations.
        ex_op_service = self._extended_operations_services.get("zone_operations")
        if not ex_op_service:
            ex_op_service = zone_operations.ZoneOperationsClient(
                credentials=self._credentials,
                transport=self.kind,
            )
            self._extended_operations_services["zone_operations"] = ex_op_service

        return ex_op_service


__all__ = ("InstanceSettingsServiceTransport",)
b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_settings_service/transports/rest.py @@ -0,0 +1,459 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import InstanceSettingsServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class 
InstanceSettingsServiceRestInterceptor: + """Interceptor for InstanceSettingsService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the InstanceSettingsServiceRestTransport. + + .. code-block:: python + class MyCustomInstanceSettingsServiceInterceptor(InstanceSettingsServiceRestInterceptor): + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + transport = InstanceSettingsServiceRestTransport(interceptor=MyCustomInstanceSettingsServiceInterceptor()) + client = InstanceSettingsServiceClient(transport=transport) + + + """ + + def pre_get( + self, + request: compute.GetInstanceSettingRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.GetInstanceSettingRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceSettingsService server. + """ + return request, metadata + + def post_get(self, response: compute.InstanceSettings) -> compute.InstanceSettings: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the InstanceSettingsService server but before + it is returned to user code. 
+ """ + return response + + def pre_patch( + self, + request: compute.PatchInstanceSettingRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.PatchInstanceSettingRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstanceSettingsService server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + Override in a subclass to manipulate the response + after it is returned by the InstanceSettingsService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class InstanceSettingsServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: InstanceSettingsServiceRestInterceptor + + +class InstanceSettingsServiceRestTransport(InstanceSettingsServiceTransport): + """REST backend transport for InstanceSettingsService. + + The InstanceSettings API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__( + self, + *, + host: str = "compute.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[InstanceSettingsServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or InstanceSettingsServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Get(InstanceSettingsServiceRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.GetInstanceSettingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.InstanceSettings: + r"""Call the get method over HTTP.
+ + Args: + request (~.compute.GetInstanceSettingRequest): + The request object. A request message for + InstanceSettingsService.Get. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.InstanceSettings: + Represents a Instance Settings + resource. You can use instance settings + to configure default settings for + Compute Engine VM instances. For + example, you can use it to configure + default machine type of Compute Engine + VM instances. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceSettings", + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetInstanceSettingRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstanceSettings() + pb_resp = compute.InstanceSettings.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _Patch(InstanceSettingsServiceRestStub): + def __hash__(self): + return hash("Patch") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.PatchInstanceSettingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchInstanceSettingRequest): + The request object. A request message for + InstanceSettingsService.Patch. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global </compute/docs/reference/rest/v1/globalOperations>`__ + \* + `Regional </compute/docs/reference/rest/v1/regionOperations>`__ + \* + `Zonal </compute/docs/reference/rest/v1/zoneOperations>`__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zoneOperations`` resource. For more + information, read Global, Regional, and Zonal Resources.
+ Note that completed Operation resources have a limited + retention period. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceSettings", + "body": "instance_settings_resource", + }, + ] + request, metadata = self._interceptor.pre_patch(request, metadata) + pb_request = compute.PatchInstanceSettingRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_patch(resp) + return resp + + @property + def get( + self, + ) -> Callable[[compute.GetInstanceSettingRequest], compute.InstanceSettings]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch( + self, + ) -> Callable[[compute.PatchInstanceSettingRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("InstanceSettingsServiceRestTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py index 82b60b715a39..0c8ef0c13d05 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instance_templates/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, InstanceTemplatesTransport]] = None, + transport: Optional[ + Union[ + str, + InstanceTemplatesTransport, + Callable[..., InstanceTemplatesTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +525,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, InstanceTemplatesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. 
+ transport (Optional[Union[str,InstanceTemplatesTransport,Callable[..., InstanceTemplatesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the InstanceTemplatesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -632,8 +641,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[InstanceTemplatesTransport], + Callable[..., InstanceTemplatesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., InstanceTemplatesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -716,8 +733,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -725,10 +742,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListInstanceTemplatesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListInstanceTemplatesRequest): request = compute.AggregatedListInstanceTemplatesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -841,8 +856,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, instance_template]) if request is not None and has_flattened_params: raise ValueError( @@ -850,10 +865,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteInstanceTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteInstanceTemplateRequest): request = compute.DeleteInstanceTemplateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -964,8 +977,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, instance_template]) if request is not None and has_flattened_params: raise ValueError( @@ -973,10 +986,8 @@ def sample_delete(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteInstanceTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteInstanceTemplateRequest): request = compute.DeleteInstanceTemplateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1117,8 +1128,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, instance_template]) if request is not None and has_flattened_params: raise ValueError( @@ -1126,10 +1137,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetInstanceTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetInstanceTemplateRequest): request = compute.GetInstanceTemplateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1260,8 +1269,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1269,10 +1278,8 @@ def sample_get_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetIamPolicyInstanceTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetIamPolicyInstanceTemplateRequest): request = compute.GetIamPolicyInstanceTemplateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1382,8 +1389,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, instance_template_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1391,10 +1398,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertInstanceTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertInstanceTemplateRequest): request = compute.InsertInstanceTemplateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1499,8 +1504,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, instance_template_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1508,10 +1513,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertInstanceTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertInstanceTemplateRequest): request = compute.InsertInstanceTemplateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1634,8 +1637,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1643,10 +1646,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListInstanceTemplatesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ListInstanceTemplatesRequest): request = compute.ListInstanceTemplatesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1787,8 +1788,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, global_set_policy_request_resource] ) @@ -1798,10 +1799,8 @@ def sample_set_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetIamPolicyInstanceTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetIamPolicyInstanceTemplateRequest): request = compute.SetIamPolicyInstanceTemplateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1922,8 +1921,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, test_permissions_request_resource] ) @@ -1933,10 +1932,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsInstanceTemplateRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.TestIamPermissionsInstanceTemplateRequest): request = compute.TestIamPermissionsInstanceTemplateRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py index 6acc0b5ae2e0..f712f798a95a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instances/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, InstancesTransport]] = None, + transport: Optional[ + Union[str, InstancesTransport, Callable[..., InstancesTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, InstancesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,InstancesTransport,Callable[..., InstancesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the InstancesTransport constructor. 
+ If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -627,8 +632,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[InstancesTransport], Callable[..., InstancesTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., InstancesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -732,8 +744,8 @@ def sample_add_access_config(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, network_interface, access_config_resource] ) @@ -743,10 +755,8 @@ def sample_add_access_config(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddAccessConfigInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.AddAccessConfigInstanceRequest): request = compute.AddAccessConfigInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -884,8 +894,8 @@ def sample_add_access_config(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, network_interface, access_config_resource] ) @@ -895,10 +905,8 @@ def sample_add_access_config(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddAccessConfigInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddAccessConfigInstanceRequest): request = compute.AddAccessConfigInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1057,8 +1065,8 @@ def sample_add_resource_policies(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, instances_add_resource_policies_request_resource] ) @@ -1068,10 +1076,8 @@ def sample_add_resource_policies(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddResourcePoliciesInstanceRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddResourcePoliciesInstanceRequest): request = compute.AddResourcePoliciesInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1205,8 +1211,8 @@ def sample_add_resource_policies(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, instances_add_resource_policies_request_resource] ) @@ -1216,10 +1222,8 @@ def sample_add_resource_policies(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddResourcePoliciesInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddResourcePoliciesInstanceRequest): request = compute.AddResourcePoliciesInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1357,8 +1361,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1366,10 +1370,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListInstancesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListInstancesRequest): request = compute.AggregatedListInstancesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1496,8 +1498,8 @@ def sample_attach_disk(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance, attached_disk_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1505,10 +1507,8 @@ def sample_attach_disk(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AttachDiskInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AttachDiskInstanceRequest): request = compute.AttachDiskInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1638,8 +1638,8 @@ def sample_attach_disk(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance, attached_disk_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1647,10 +1647,8 @@ def sample_attach_disk(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AttachDiskInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AttachDiskInstanceRequest): request = compute.AttachDiskInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1798,8 +1796,8 @@ def sample_bulk_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, bulk_insert_instance_resource_resource] ) @@ -1809,10 +1807,8 @@ def sample_bulk_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.BulkInsertInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.BulkInsertInstanceRequest): request = compute.BulkInsertInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1934,8 +1930,8 @@ def sample_bulk_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, bulk_insert_instance_resource_resource] ) @@ -1945,10 +1941,8 @@ def sample_bulk_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.BulkInsertInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.BulkInsertInstanceRequest): request = compute.BulkInsertInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2095,8 +2089,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance]) if request is not None and has_flattened_params: raise ValueError( @@ -2104,10 +2098,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteInstanceRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteInstanceRequest): request = compute.DeleteInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2228,8 +2220,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance]) if request is not None and has_flattened_params: raise ValueError( @@ -2237,10 +2229,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteInstanceRequest): request = compute.DeleteInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2402,8 +2392,8 @@ def sample_delete_access_config(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, zone, instance, access_config, network_interface] ) @@ -2413,10 +2403,8 @@ def sample_delete_access_config(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteAccessConfigInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteAccessConfigInstanceRequest): request = compute.DeleteAccessConfigInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2557,8 +2545,8 @@ def sample_delete_access_config(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, access_config, network_interface] ) @@ -2568,10 +2556,8 @@ def sample_delete_access_config(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteAccessConfigInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteAccessConfigInstanceRequest): request = compute.DeleteAccessConfigInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2729,8 +2715,8 @@ def sample_detach_disk(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance, device_name]) if request is not None and has_flattened_params: raise ValueError( @@ -2738,10 +2724,8 @@ def sample_detach_disk(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DetachDiskInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DetachDiskInstanceRequest): request = compute.DetachDiskInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2872,8 +2856,8 @@ def sample_detach_disk(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance, device_name]) if request is not None and has_flattened_params: raise ValueError( @@ -2881,10 +2865,8 @@ def sample_detach_disk(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DetachDiskInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DetachDiskInstanceRequest): request = compute.DetachDiskInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3033,8 +3015,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance]) if request is not None and has_flattened_params: raise ValueError( @@ -3042,10 +3024,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetInstanceRequest): request = compute.GetInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3175,8 +3155,8 @@ def sample_get_effective_firewalls(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance, network_interface]) if request is not None and has_flattened_params: raise ValueError( @@ -3184,10 +3164,8 @@ def sample_get_effective_firewalls(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetEffectiveFirewallsInstanceRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetEffectiveFirewallsInstanceRequest): request = compute.GetEffectiveFirewallsInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3309,8 +3287,8 @@ def sample_get_guest_attributes(): A guest attributes entry. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance]) if request is not None and has_flattened_params: raise ValueError( @@ -3318,10 +3296,8 @@ def sample_get_guest_attributes(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetGuestAttributesInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetGuestAttributesInstanceRequest): request = compute.GetGuestAttributesInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3462,8 +3438,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, zone, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -3471,10 +3447,8 @@ def sample_get_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetIamPolicyInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetIamPolicyInstanceRequest): request = compute.GetIamPolicyInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3592,8 +3566,8 @@ def sample_get_screenshot(): An instance's screenshot. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance]) if request is not None and has_flattened_params: raise ValueError( @@ -3601,10 +3575,8 @@ def sample_get_screenshot(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetScreenshotInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetScreenshotInstanceRequest): request = compute.GetScreenshotInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3725,8 +3697,8 @@ def sample_get_serial_port_output(): An instance serial console output. 
""" # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance]) if request is not None and has_flattened_params: raise ValueError( @@ -3734,10 +3706,8 @@ def sample_get_serial_port_output(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetSerialPortOutputInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetSerialPortOutputInstanceRequest): request = compute.GetSerialPortOutputInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3857,8 +3827,8 @@ def sample_get_shielded_instance_identity(): A Shielded Instance Identity. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance]) if request is not None and has_flattened_params: raise ValueError( @@ -3866,10 +3836,8 @@ def sample_get_shielded_instance_identity(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetShieldedInstanceIdentityInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetShieldedInstanceIdentityInstanceRequest): request = compute.GetShieldedInstanceIdentityInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3989,8 +3957,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -3998,10 +3966,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertInstanceRequest): request = compute.InsertInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4118,8 +4084,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -4127,10 +4093,8 @@ def sample_insert(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertInstanceRequest): request = compute.InsertInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4269,8 +4233,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone]) if request is not None and has_flattened_params: raise ValueError( @@ -4278,10 +4242,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListInstancesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListInstancesRequest): request = compute.ListInstancesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4418,8 +4380,8 @@ def sample_list_referrers(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, zone, instance]) if request is not None and has_flattened_params: raise ValueError( @@ -4427,10 +4389,8 @@ def sample_list_referrers(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListReferrersInstancesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListReferrersInstancesRequest): request = compute.ListReferrersInstancesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4561,8 +4521,8 @@ def sample_perform_maintenance(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance]) if request is not None and has_flattened_params: raise ValueError( @@ -4570,10 +4530,8 @@ def sample_perform_maintenance(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PerformMaintenanceInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.PerformMaintenanceInstanceRequest): request = compute.PerformMaintenanceInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4695,8 +4653,8 @@ def sample_perform_maintenance(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance]) if request is not None and has_flattened_params: raise ValueError( @@ -4704,10 +4662,8 @@ def sample_perform_maintenance(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PerformMaintenanceInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PerformMaintenanceInstanceRequest): request = compute.PerformMaintenanceInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4860,8 +4816,8 @@ def sample_remove_resource_policies(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -4876,10 +4832,8 @@ def sample_remove_resource_policies(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveResourcePoliciesInstanceRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.RemoveResourcePoliciesInstanceRequest): request = compute.RemoveResourcePoliciesInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -5011,8 +4965,8 @@ def sample_remove_resource_policies(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -5027,10 +4981,8 @@ def sample_remove_resource_policies(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveResourcePoliciesInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.RemoveResourcePoliciesInstanceRequest): request = compute.RemoveResourcePoliciesInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -5181,8 +5133,8 @@ def sample_reset(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, zone, instance]) if request is not None and has_flattened_params: raise ValueError( @@ -5190,10 +5142,8 @@ def sample_reset(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ResetInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ResetInstanceRequest): request = compute.ResetInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -5315,8 +5265,8 @@ def sample_reset(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance]) if request is not None and has_flattened_params: raise ValueError( @@ -5324,10 +5274,8 @@ def sample_reset(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ResetInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ResetInstanceRequest): request = compute.ResetInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -5473,8 +5421,8 @@ def sample_resume(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance]) if request is not None and has_flattened_params: raise ValueError( @@ -5482,10 +5430,8 @@ def sample_resume(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ResumeInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ResumeInstanceRequest): request = compute.ResumeInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -5606,8 +5552,8 @@ def sample_resume(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance]) if request is not None and has_flattened_params: raise ValueError( @@ -5615,10 +5561,8 @@ def sample_resume(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ResumeInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ResumeInstanceRequest): request = compute.ResumeInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -5766,8 +5710,8 @@ def sample_send_diagnostic_interrupt(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance]) if request is not None and has_flattened_params: raise ValueError( @@ -5775,10 +5719,8 @@ def sample_send_diagnostic_interrupt(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SendDiagnosticInterruptInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SendDiagnosticInterruptInstanceRequest): request = compute.SendDiagnosticInterruptInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -5902,8 +5844,8 @@ def sample_set_deletion_protection(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -5911,10 +5853,8 @@ def sample_set_deletion_protection(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetDeletionProtectionInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetDeletionProtectionInstanceRequest): request = compute.SetDeletionProtectionInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -6036,8 +5976,8 @@ def sample_set_deletion_protection(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -6045,10 +5985,8 @@ def sample_set_deletion_protection(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetDeletionProtectionInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetDeletionProtectionInstanceRequest): request = compute.SetDeletionProtectionInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -6212,8 +6150,8 @@ def sample_set_disk_auto_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance, auto_delete, device_name]) if request is not None and has_flattened_params: raise ValueError( @@ -6221,10 +6159,8 @@ def sample_set_disk_auto_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetDiskAutoDeleteInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetDiskAutoDeleteInstanceRequest): request = compute.SetDiskAutoDeleteInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -6367,8 +6303,8 @@ def sample_set_disk_auto_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance, auto_delete, device_name]) if request is not None and has_flattened_params: raise ValueError( @@ -6376,10 +6312,8 @@ def sample_set_disk_auto_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetDiskAutoDeleteInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetDiskAutoDeleteInstanceRequest): request = compute.SetDiskAutoDeleteInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -6555,8 +6489,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, resource, zone_set_policy_request_resource] ) @@ -6566,10 +6500,8 @@ def sample_set_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetIamPolicyInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetIamPolicyInstanceRequest): request = compute.SetIamPolicyInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -6702,8 +6634,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, instances_set_labels_request_resource] ) @@ -6713,10 +6645,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsInstanceRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsInstanceRequest): request = compute.SetLabelsInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -6849,8 +6779,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, instances_set_labels_request_resource] ) @@ -6860,10 +6790,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsInstanceRequest): request = compute.SetLabelsInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -7023,8 +6951,8 @@ def sample_set_machine_resources(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, zone, instance, instances_set_machine_resources_request_resource] ) @@ -7034,10 +6962,8 @@ def sample_set_machine_resources(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetMachineResourcesInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetMachineResourcesInstanceRequest): request = compute.SetMachineResourcesInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -7172,8 +7098,8 @@ def sample_set_machine_resources(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, instances_set_machine_resources_request_resource] ) @@ -7183,10 +7109,8 @@ def sample_set_machine_resources(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetMachineResourcesInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetMachineResourcesInstanceRequest): request = compute.SetMachineResourcesInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -7344,8 +7268,8 @@ def sample_set_machine_type(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, instances_set_machine_type_request_resource] ) @@ -7355,10 +7279,8 @@ def sample_set_machine_type(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetMachineTypeInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetMachineTypeInstanceRequest): request = compute.SetMachineTypeInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -7491,8 +7413,8 @@ def sample_set_machine_type(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, instances_set_machine_type_request_resource] ) @@ -7502,10 +7424,8 @@ def sample_set_machine_type(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetMachineTypeInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetMachineTypeInstanceRequest): request = compute.SetMachineTypeInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -7661,8 +7581,8 @@ def sample_set_metadata(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance, metadata_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -7670,10 +7590,8 @@ def sample_set_metadata(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetMetadataInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetMetadataInstanceRequest): request = compute.SetMetadataInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -7802,8 +7720,8 @@ def sample_set_metadata(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance, metadata_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -7811,10 +7729,8 @@ def sample_set_metadata(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetMetadataInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetMetadataInstanceRequest): request = compute.SetMetadataInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -7972,8 +7888,8 @@ def sample_set_min_cpu_platform(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, instances_set_min_cpu_platform_request_resource] ) @@ -7983,10 +7899,8 @@ def sample_set_min_cpu_platform(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetMinCpuPlatformInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetMinCpuPlatformInstanceRequest): request = compute.SetMinCpuPlatformInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -8121,8 +8035,8 @@ def sample_set_min_cpu_platform(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, instances_set_min_cpu_platform_request_resource] ) @@ -8132,10 +8046,8 @@ def sample_set_min_cpu_platform(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetMinCpuPlatformInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetMinCpuPlatformInstanceRequest): request = compute.SetMinCpuPlatformInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -8290,8 +8202,8 @@ def sample_set_name(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, instances_set_name_request_resource] ) @@ -8301,10 +8213,8 @@ def sample_set_name(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetNameInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetNameInstanceRequest): request = compute.SetNameInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -8434,8 +8344,8 @@ def sample_set_name(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, instances_set_name_request_resource] ) @@ -8445,10 +8355,8 @@ def sample_set_name(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetNameInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetNameInstanceRequest): request = compute.SetNameInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -8606,8 +8514,8 @@ def sample_set_scheduling(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance, scheduling_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -8615,10 +8523,8 @@ def sample_set_scheduling(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetSchedulingInstanceRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetSchedulingInstanceRequest): request = compute.SetSchedulingInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -8749,8 +8655,8 @@ def sample_set_scheduling(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance, scheduling_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -8758,10 +8664,8 @@ def sample_set_scheduling(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetSchedulingInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetSchedulingInstanceRequest): request = compute.SetSchedulingInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -8919,8 +8823,8 @@ def sample_set_security_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, zone, instance, instances_set_security_policy_request_resource] ) @@ -8930,10 +8834,8 @@ def sample_set_security_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetSecurityPolicyInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetSecurityPolicyInstanceRequest): request = compute.SetSecurityPolicyInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -9068,8 +8970,8 @@ def sample_set_security_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, instances_set_security_policy_request_resource] ) @@ -9079,10 +8981,8 @@ def sample_set_security_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetSecurityPolicyInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetSecurityPolicyInstanceRequest): request = compute.SetSecurityPolicyInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -9241,8 +9141,8 @@ def sample_set_service_account(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, instances_set_service_account_request_resource] ) @@ -9252,10 +9152,8 @@ def sample_set_service_account(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetServiceAccountInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetServiceAccountInstanceRequest): request = compute.SetServiceAccountInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -9389,8 +9287,8 @@ def sample_set_service_account(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, instances_set_service_account_request_resource] ) @@ -9400,10 +9298,8 @@ def sample_set_service_account(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetServiceAccountInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetServiceAccountInstanceRequest): request = compute.SetServiceAccountInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -9565,8 +9461,8 @@ def sample_set_shielded_instance_integrity_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, shielded_instance_integrity_policy_resource] ) @@ -9576,10 +9472,8 @@ def sample_set_shielded_instance_integrity_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetShieldedInstanceIntegrityPolicyInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.SetShieldedInstanceIntegrityPolicyInstanceRequest ): @@ -9720,8 +9614,8 @@ def sample_set_shielded_instance_integrity_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, shielded_instance_integrity_policy_resource] ) @@ -9731,10 +9625,8 @@ def sample_set_shielded_instance_integrity_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetShieldedInstanceIntegrityPolicyInstanceRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.SetShieldedInstanceIntegrityPolicyInstanceRequest ): @@ -9894,8 +9786,8 @@ def sample_set_tags(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance, tags_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -9903,10 +9795,8 @@ def sample_set_tags(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetTagsInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetTagsInstanceRequest): request = compute.SetTagsInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -10035,8 +9925,8 @@ def sample_set_tags(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance, tags_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -10044,10 +9934,8 @@ def sample_set_tags(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetTagsInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetTagsInstanceRequest): request = compute.SetTagsInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -10197,8 +10085,8 @@ def sample_simulate_maintenance_event(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance]) if request is not None and has_flattened_params: raise ValueError( @@ -10206,10 +10094,8 @@ def sample_simulate_maintenance_event(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SimulateMaintenanceEventInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SimulateMaintenanceEventInstanceRequest): request = compute.SimulateMaintenanceEventInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -10334,8 +10220,8 @@ def sample_simulate_maintenance_event(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance]) if request is not None and has_flattened_params: raise ValueError( @@ -10343,10 +10229,8 @@ def sample_simulate_maintenance_event(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SimulateMaintenanceEventInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SimulateMaintenanceEventInstanceRequest): request = compute.SimulateMaintenanceEventInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -10495,8 +10379,8 @@ def sample_start(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance]) if request is not None and has_flattened_params: raise ValueError( @@ -10504,10 +10388,8 @@ def sample_start(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.StartInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.StartInstanceRequest): request = compute.StartInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -10629,8 +10511,8 @@ def sample_start(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance]) if request is not None and has_flattened_params: raise ValueError( @@ -10638,10 +10520,8 @@ def sample_start(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.StartInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.StartInstanceRequest): request = compute.StartInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -10798,8 +10678,8 @@ def sample_start_with_encryption_key(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -10814,10 +10694,8 @@ def sample_start_with_encryption_key(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.StartWithEncryptionKeyInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.StartWithEncryptionKeyInstanceRequest): request = compute.StartWithEncryptionKeyInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -10955,8 +10833,8 @@ def sample_start_with_encryption_key(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -10971,10 +10849,8 @@ def sample_start_with_encryption_key(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.StartWithEncryptionKeyInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.StartWithEncryptionKeyInstanceRequest): request = compute.StartWithEncryptionKeyInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -11130,8 +11006,8 @@ def sample_stop(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance]) if request is not None and has_flattened_params: raise ValueError( @@ -11139,10 +11015,8 @@ def sample_stop(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.StopInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.StopInstanceRequest): request = compute.StopInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -11267,8 +11141,8 @@ def sample_stop(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance]) if request is not None and has_flattened_params: raise ValueError( @@ -11276,10 +11150,8 @@ def sample_stop(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.StopInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.StopInstanceRequest): request = compute.StopInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -11432,8 +11304,8 @@ def sample_suspend(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, zone, instance]) if request is not None and has_flattened_params: raise ValueError( @@ -11441,10 +11313,8 @@ def sample_suspend(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SuspendInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SuspendInstanceRequest): request = compute.SuspendInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -11572,8 +11442,8 @@ def sample_suspend(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance]) if request is not None and has_flattened_params: raise ValueError( @@ -11581,10 +11451,8 @@ def sample_suspend(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SuspendInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SuspendInstanceRequest): request = compute.SuspendInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -11738,8 +11606,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, resource, test_permissions_request_resource] ) @@ -11749,10 +11617,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.TestIamPermissionsInstanceRequest): request = compute.TestIamPermissionsInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -11885,8 +11751,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance, instance_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -11894,10 +11760,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.UpdateInstanceRequest): request = compute.UpdateInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -12028,8 +11892,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance, instance_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -12037,10 +11901,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateInstanceRequest): request = compute.UpdateInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -12205,8 +12067,8 @@ def sample_update_access_config(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, network_interface, access_config_resource] ) @@ -12216,10 +12078,8 @@ def sample_update_access_config(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateAccessConfigInstanceRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateAccessConfigInstanceRequest): request = compute.UpdateAccessConfigInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -12361,8 +12221,8 @@ def sample_update_access_config(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, network_interface, access_config_resource] ) @@ -12372,10 +12232,8 @@ def sample_update_access_config(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateAccessConfigInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateAccessConfigInstanceRequest): request = compute.UpdateAccessConfigInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -12535,8 +12393,8 @@ def sample_update_display_device(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, zone, instance, display_device_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -12544,10 +12402,8 @@ def sample_update_display_device(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateDisplayDeviceInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateDisplayDeviceInstanceRequest): request = compute.UpdateDisplayDeviceInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -12680,8 +12536,8 @@ def sample_update_display_device(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instance, display_device_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -12689,10 +12545,8 @@ def sample_update_display_device(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateDisplayDeviceInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.UpdateDisplayDeviceInstanceRequest): request = compute.UpdateDisplayDeviceInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -12860,8 +12714,8 @@ def sample_update_network_interface(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, network_interface, network_interface_resource] ) @@ -12871,10 +12725,8 @@ def sample_update_network_interface(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateNetworkInterfaceInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateNetworkInterfaceInstanceRequest): request = compute.UpdateNetworkInterfaceInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -13019,8 +12871,8 @@ def sample_update_network_interface(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, network_interface, network_interface_resource] ) @@ -13030,10 +12882,8 @@ def sample_update_network_interface(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateNetworkInterfaceInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateNetworkInterfaceInstanceRequest): request = compute.UpdateNetworkInterfaceInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -13195,8 +13045,8 @@ def sample_update_shielded_instance_config(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, shielded_instance_config_resource] ) @@ -13206,10 +13056,8 @@ def sample_update_shielded_instance_config(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateShieldedInstanceConfigInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateShieldedInstanceConfigInstanceRequest): request = compute.UpdateShieldedInstanceConfigInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -13348,8 +13196,8 @@ def sample_update_shielded_instance_config(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, instance, shielded_instance_config_resource] ) @@ -13359,10 +13207,8 @@ def sample_update_shielded_instance_config(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateShieldedInstanceConfigInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateShieldedInstanceConfigInstanceRequest): request = compute.UpdateShieldedInstanceConfigInstanceRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/client.py index 202e50211225..bbad313a1bf1 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshots/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, InstantSnapshotsTransport]] = None, + transport: Optional[ + Union[ + str, InstantSnapshotsTransport, Callable[..., InstantSnapshotsTransport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +523,11 @@ def __init__( credentials identify the application to the service; if none 
are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, InstantSnapshotsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,InstantSnapshotsTransport,Callable[..., InstantSnapshotsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the InstantSnapshotsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -632,8 +639,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[InstantSnapshotsTransport], + Callable[..., InstantSnapshotsTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., InstantSnapshotsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -711,8 +726,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -720,10 +735,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListInstantSnapshotsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListInstantSnapshotsRequest): request = compute.AggregatedListInstantSnapshotsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -849,8 +862,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instant_snapshot]) if request is not None and has_flattened_params: raise ValueError( @@ -858,10 +871,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteInstantSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteInstantSnapshotRequest): request = compute.DeleteInstantSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -988,8 +999,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instant_snapshot]) if request is not None and has_flattened_params: raise ValueError( @@ -997,10 +1008,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteInstantSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteInstantSnapshotRequest): request = compute.DeleteInstantSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1147,8 +1156,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instant_snapshot]) if request is not None and has_flattened_params: raise ValueError( @@ -1156,10 +1165,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetInstantSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.GetInstantSnapshotRequest): request = compute.GetInstantSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1302,8 +1309,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1311,10 +1318,8 @@ def sample_get_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetIamPolicyInstantSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetIamPolicyInstantSnapshotRequest): request = compute.GetIamPolicyInstantSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1429,8 +1434,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instant_snapshot_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1438,10 +1443,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertInstantSnapshotRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertInstantSnapshotRequest): request = compute.InsertInstantSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1555,8 +1558,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, instant_snapshot_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1564,10 +1567,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertInstantSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertInstantSnapshotRequest): request = compute.InsertInstantSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1707,8 +1708,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, zone]) if request is not None and has_flattened_params: raise ValueError( @@ -1716,10 +1717,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListInstantSnapshotsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListInstantSnapshotsRequest): request = compute.ListInstantSnapshotsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1874,8 +1873,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, resource, zone_set_policy_request_resource] ) @@ -1885,10 +1884,8 @@ def sample_set_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetIamPolicyInstantSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetIamPolicyInstantSnapshotRequest): request = compute.SetIamPolicyInstantSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2020,8 +2017,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, resource, zone_set_labels_request_resource] ) @@ -2031,10 +2028,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsInstantSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsInstantSnapshotRequest): request = compute.SetLabelsInstantSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2166,8 +2161,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, resource, zone_set_labels_request_resource] ) @@ -2177,10 +2172,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsInstantSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetLabelsInstantSnapshotRequest): request = compute.SetLabelsInstantSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2338,8 +2331,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, resource, test_permissions_request_resource] ) @@ -2349,10 +2342,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsInstantSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.TestIamPermissionsInstantSnapshotRequest): request = compute.TestIamPermissionsInstantSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/client.py index c4e11a47402e..c3031a468ebb 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_attachments/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -508,7 +509,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, InterconnectAttachmentsTransport]] = None, + transport: Optional[ + Union[ + str, + InterconnectAttachmentsTransport, + Callable[..., InterconnectAttachmentsTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -520,9 +527,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, InterconnectAttachmentsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,InterconnectAttachmentsTransport,Callable[..., InterconnectAttachmentsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the InterconnectAttachmentsTransport constructor. + If set to None, a transport is chosen automatically. 
NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -636,8 +645,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[InterconnectAttachmentsTransport], + Callable[..., InterconnectAttachmentsTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., InterconnectAttachmentsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -715,8 +732,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -724,10 +741,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListInterconnectAttachmentsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.AggregatedListInterconnectAttachmentsRequest ): @@ -848,8 +863,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, interconnect_attachment]) if request is not None and has_flattened_params: raise ValueError( @@ -857,10 +872,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteInterconnectAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteInterconnectAttachmentRequest): request = compute.DeleteInterconnectAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -980,8 +993,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, interconnect_attachment]) if request is not None and has_flattened_params: raise ValueError( @@ -989,10 +1002,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteInterconnectAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteInterconnectAttachmentRequest): request = compute.DeleteInterconnectAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1141,8 +1152,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, interconnect_attachment]) if request is not None and has_flattened_params: raise ValueError( @@ -1150,10 +1161,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetInterconnectAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetInterconnectAttachmentRequest): request = compute.GetInterconnectAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1273,8 +1282,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, interconnect_attachment_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1282,10 +1291,8 @@ def sample_insert(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertInterconnectAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertInterconnectAttachmentRequest): request = compute.InsertInterconnectAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1406,8 +1413,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, interconnect_attachment_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1415,10 +1422,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertInterconnectAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertInterconnectAttachmentRequest): request = compute.InsertInterconnectAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1560,8 +1565,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1569,10 +1574,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListInterconnectAttachmentsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListInterconnectAttachmentsRequest): request = compute.ListInterconnectAttachmentsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1711,8 +1714,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, interconnect_attachment, interconnect_attachment_resource] ) @@ -1722,10 +1725,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchInterconnectAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.PatchInterconnectAttachmentRequest): request = compute.PatchInterconnectAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1862,8 +1863,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, interconnect_attachment, interconnect_attachment_resource] ) @@ -1873,10 +1874,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchInterconnectAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchInterconnectAttachmentRequest): request = compute.PatchInterconnectAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2035,8 +2034,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, region_set_labels_request_resource] ) @@ -2046,10 +2045,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsInterconnectAttachmentRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsInterconnectAttachmentRequest): request = compute.SetLabelsInterconnectAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2183,8 +2180,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, region_set_labels_request_resource] ) @@ -2194,10 +2191,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsInterconnectAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetLabelsInterconnectAttachmentRequest): request = compute.SetLabelsInterconnectAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/client.py index 0f6a00336377..a0a139a187e6 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_locations/client.py @@ -17,6 +17,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -505,7 +506,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, InterconnectLocationsTransport]] = None, + transport: Optional[ + Union[ + str, + InterconnectLocationsTransport, + Callable[..., InterconnectLocationsTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -517,9 +524,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, InterconnectLocationsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,InterconnectLocationsTransport,Callable[..., InterconnectLocationsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the InterconnectLocationsTransport constructor. + If set to None, a transport is chosen automatically. 
NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -631,8 +640,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[InterconnectLocationsTransport], + Callable[..., InterconnectLocationsTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., InterconnectLocationsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -720,8 +737,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, interconnect_location]) if request is not None and has_flattened_params: raise ValueError( @@ -729,10 +746,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetInterconnectLocationRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetInterconnectLocationRequest): request = compute.GetInterconnectLocationRequest(request) # If we have keyword arguments corresponding to fields on the @@ -836,8 +851,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -845,10 +860,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListInterconnectLocationsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListInterconnectLocationsRequest): request = compute.ListInterconnectLocationsRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/client.py index 595a938ef923..65359679c76f 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnect_remote_locations/client.py @@ -17,6 +17,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -507,7 +508,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, InterconnectRemoteLocationsTransport]] = None, + transport: Optional[ + Union[ + str, + InterconnectRemoteLocationsTransport, + Callable[..., InterconnectRemoteLocationsTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: 
gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -519,9 +526,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, InterconnectRemoteLocationsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,InterconnectRemoteLocationsTransport,Callable[..., InterconnectRemoteLocationsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the InterconnectRemoteLocationsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -635,8 +644,18 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[InterconnectRemoteLocationsTransport], + Callable[..., InterconnectRemoteLocationsTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast( + Callable[..., InterconnectRemoteLocationsTransport], transport + ) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -724,8 +743,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, interconnect_remote_location]) if request is not None and has_flattened_params: raise ValueError( @@ -733,10 +752,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetInterconnectRemoteLocationRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetInterconnectRemoteLocationRequest): request = compute.GetInterconnectRemoteLocationRequest(request) # If we have keyword arguments corresponding to fields on the @@ -845,8 +862,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -854,10 +871,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListInterconnectRemoteLocationsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ListInterconnectRemoteLocationsRequest): request = compute.ListInterconnectRemoteLocationsRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py index 21d7a627667d..fae7283b29f4 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/interconnects/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, InterconnectsTransport]] = None, + transport: Optional[ + Union[str, InterconnectsTransport, Callable[..., InterconnectsTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, InterconnectsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,InterconnectsTransport,Callable[..., InterconnectsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the InterconnectsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). 
We welcome your feedback via an issue in this library's source repository. @@ -630,8 +635,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[InterconnectsTransport], Callable[..., InterconnectsTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., InterconnectsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -710,8 +722,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, interconnect]) if request is not None and has_flattened_params: raise ValueError( @@ -719,10 +731,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteInterconnectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteInterconnectRequest): request = compute.DeleteInterconnectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -828,8 +838,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, interconnect]) if request is not None and has_flattened_params: raise ValueError( @@ -837,10 +847,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteInterconnectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteInterconnectRequest): request = compute.DeleteInterconnectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -975,8 +983,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, interconnect]) if request is not None and has_flattened_params: raise ValueError( @@ -984,10 +992,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetInterconnectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.GetInterconnectRequest): request = compute.GetInterconnectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1103,8 +1109,8 @@ def sample_get_diagnostics(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, interconnect]) if request is not None and has_flattened_params: raise ValueError( @@ -1112,10 +1118,8 @@ def sample_get_diagnostics(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetDiagnosticsInterconnectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetDiagnosticsInterconnectRequest): request = compute.GetDiagnosticsInterconnectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1226,8 +1230,8 @@ def sample_get_macsec_config(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, interconnect]) if request is not None and has_flattened_params: raise ValueError( @@ -1235,10 +1239,8 @@ def sample_get_macsec_config(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetMacsecConfigInterconnectRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetMacsecConfigInterconnectRequest): request = compute.GetMacsecConfigInterconnectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1344,8 +1346,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, interconnect_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1353,10 +1355,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertInterconnectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertInterconnectRequest): request = compute.InsertInterconnectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1457,8 +1457,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, interconnect_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1466,10 +1466,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertInterconnectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertInterconnectRequest): request = compute.InsertInterconnectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1592,8 +1590,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1601,10 +1599,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListInterconnectsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListInterconnectsRequest): request = compute.ListInterconnectsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1721,8 +1717,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, interconnect, interconnect_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1730,10 +1726,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchInterconnectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchInterconnectRequest): request = compute.PatchInterconnectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1850,8 +1844,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, interconnect, interconnect_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1859,10 +1853,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchInterconnectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.PatchInterconnectRequest): request = compute.PatchInterconnectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2005,8 +1997,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, global_set_labels_request_resource] ) @@ -2016,10 +2008,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsInterconnectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsInterconnectRequest): request = compute.SetLabelsInterconnectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2140,8 +2130,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, global_set_labels_request_resource] ) @@ -2151,10 +2141,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsInterconnectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsInterconnectRequest): request = compute.SetLabelsInterconnectRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/client.py index 67abf623f889..6a2003e8c5bd 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/license_codes/client.py @@ -17,6 +17,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -500,7 +501,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, LicenseCodesTransport]] = None, + transport: Optional[ + Union[str, LicenseCodesTransport, Callable[..., LicenseCodesTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -512,9 +515,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, LicenseCodesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,LicenseCodesTransport,Callable[..., LicenseCodesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the LicenseCodesTransport constructor. + If set to None, a transport is chosen automatically. 
NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -623,8 +628,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[LicenseCodesTransport], Callable[..., LicenseCodesTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., LicenseCodesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -711,8 +723,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, license_code]) if request is not None and has_flattened_params: raise ValueError( @@ -720,10 +732,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetLicenseCodeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetLicenseCodeRequest): request = compute.GetLicenseCodeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -841,8 +851,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, test_permissions_request_resource] ) @@ -852,10 +862,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsLicenseCodeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.TestIamPermissionsLicenseCodeRequest): request = compute.TestIamPermissionsLicenseCodeRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py index c04982b7a594..29b6c37b1453 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/licenses/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, LicensesTransport]] = None, + transport: Optional[ + Union[str, LicensesTransport, Callable[..., LicensesTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the 
service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, LicensesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,LicensesTransport,Callable[..., LicensesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the LicensesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -627,8 +632,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[LicensesTransport], Callable[..., LicensesTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., LicensesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -711,8 +723,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, license_]) if request is not None and has_flattened_params: raise ValueError( @@ -720,10 +732,8 @@ def sample_delete(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteLicenseRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteLicenseRequest): request = compute.DeleteLicenseRequest(request) # If we have keyword arguments corresponding to fields on the @@ -833,8 +843,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, license_]) if request is not None and has_flattened_params: raise ValueError( @@ -842,10 +852,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteLicenseRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteLicenseRequest): request = compute.DeleteLicenseRequest(request) # If we have keyword arguments corresponding to fields on the @@ -981,8 +989,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, license_]) if request is not None and has_flattened_params: raise ValueError( @@ -990,10 +998,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetLicenseRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetLicenseRequest): request = compute.GetLicenseRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1124,8 +1130,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1133,10 +1139,8 @@ def sample_get_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetIamPolicyLicenseRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetIamPolicyLicenseRequest): request = compute.GetIamPolicyLicenseRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1243,8 +1247,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, license_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1252,10 +1256,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertLicenseRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertLicenseRequest): request = compute.InsertLicenseRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1357,8 +1359,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, license_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1366,10 +1368,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertLicenseRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.InsertLicenseRequest): request = compute.InsertLicenseRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1496,8 +1496,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1505,10 +1505,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListLicensesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListLicensesRequest): request = compute.ListLicensesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1649,8 +1647,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, global_set_policy_request_resource] ) @@ -1660,10 +1658,8 @@ def sample_set_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetIamPolicyLicenseRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetIamPolicyLicenseRequest): request = compute.SetIamPolicyLicenseRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1783,8 +1779,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, test_permissions_request_resource] ) @@ -1794,10 +1790,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsLicenseRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.TestIamPermissionsLicenseRequest): request = compute.TestIamPermissionsLicenseRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py index 92b69fb7b8fd..90baf8c28fa8 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_images/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MachineImagesTransport]] = None, + transport: Optional[ + Union[str, MachineImagesTransport, Callable[..., MachineImagesTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, MachineImagesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,MachineImagesTransport,Callable[..., MachineImagesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MachineImagesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. 
@@ -630,8 +635,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[MachineImagesTransport], Callable[..., MachineImagesTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., MachineImagesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -713,8 +725,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, machine_image]) if request is not None and has_flattened_params: raise ValueError( @@ -722,10 +734,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteMachineImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteMachineImageRequest): request = compute.DeleteMachineImageRequest(request) # If we have keyword arguments corresponding to fields on the @@ -834,8 +844,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, machine_image]) if request is not None and has_flattened_params: raise ValueError( @@ -843,10 +853,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteMachineImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteMachineImageRequest): request = compute.DeleteMachineImageRequest(request) # If we have keyword arguments corresponding to fields on the @@ -982,8 +990,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, machine_image]) if request is not None and has_flattened_params: raise ValueError( @@ -991,10 +999,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetMachineImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.GetMachineImageRequest): request = compute.GetMachineImageRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1123,8 +1129,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1132,10 +1138,8 @@ def sample_get_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetIamPolicyMachineImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetIamPolicyMachineImageRequest): request = compute.GetIamPolicyMachineImageRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1245,8 +1249,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, machine_image_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1254,10 +1258,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertMachineImageRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertMachineImageRequest): request = compute.InsertMachineImageRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1362,8 +1364,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, machine_image_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1371,10 +1373,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertMachineImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertMachineImageRequest): request = compute.InsertMachineImageRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1497,8 +1497,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1506,10 +1506,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListMachineImagesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListMachineImagesRequest): request = compute.ListMachineImagesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1648,8 +1646,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, global_set_policy_request_resource] ) @@ -1659,10 +1657,8 @@ def sample_set_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetIamPolicyMachineImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetIamPolicyMachineImageRequest): request = compute.SetIamPolicyMachineImageRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1783,8 +1779,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, test_permissions_request_resource] ) @@ -1794,10 +1790,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsMachineImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.TestIamPermissionsMachineImageRequest): request = compute.TestIamPermissionsMachineImageRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/client.py index 53deadbff28c..2418b636aa3a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/machine_types/client.py @@ -17,6 +17,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -501,7 +502,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MachineTypesTransport]] = None, + transport: Optional[ + Union[str, MachineTypesTransport, Callable[..., MachineTypesTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -513,9 +516,11 @@ def __init__( 
credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, MachineTypesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,MachineTypesTransport,Callable[..., MachineTypesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MachineTypesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -624,8 +629,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[MachineTypesTransport], Callable[..., MachineTypesTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., MachineTypesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -703,8 +715,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -712,10 +724,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListMachineTypesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListMachineTypesRequest): request = compute.AggregatedListMachineTypesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -835,8 +845,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, machine_type]) if request is not None and has_flattened_params: raise ValueError( @@ -844,10 +854,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetMachineTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetMachineTypeRequest): request = compute.GetMachineTypeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -963,8 +971,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, zone]) if request is not None and has_flattened_params: raise ValueError( @@ -972,10 +980,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListMachineTypesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListMachineTypesRequest): request = compute.ListMachineTypesRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py index def5fee8ac8a..6b9385f42ea3 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_attachments/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, NetworkAttachmentsTransport]] = None, + transport: Optional[ + Union[ + str, + NetworkAttachmentsTransport, + Callable[..., NetworkAttachmentsTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +525,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, NetworkAttachmentsTransport]): The - transport to use. 
If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,NetworkAttachmentsTransport,Callable[..., NetworkAttachmentsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the NetworkAttachmentsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -632,8 +641,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[NetworkAttachmentsTransport], + Callable[..., NetworkAttachmentsTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., NetworkAttachmentsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -714,8 +731,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -723,10 +740,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListNetworkAttachmentsRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListNetworkAttachmentsRequest): request = compute.AggregatedListNetworkAttachmentsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -844,8 +859,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, network_attachment]) if request is not None and has_flattened_params: raise ValueError( @@ -853,10 +868,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteNetworkAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteNetworkAttachmentRequest): request = compute.DeleteNetworkAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -975,8 +988,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, network_attachment]) if request is not None and has_flattened_params: raise ValueError( @@ -984,10 +997,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteNetworkAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteNetworkAttachmentRequest): request = compute.DeleteNetworkAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1131,8 +1142,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, network_attachment]) if request is not None and has_flattened_params: raise ValueError( @@ -1140,10 +1151,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetNetworkAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetNetworkAttachmentRequest): request = compute.GetNetworkAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1286,8 +1295,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1295,10 +1304,8 @@ def sample_get_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetIamPolicyNetworkAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetIamPolicyNetworkAttachmentRequest): request = compute.GetIamPolicyNetworkAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1415,8 +1422,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, network_attachment_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1424,10 +1431,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertNetworkAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.InsertNetworkAttachmentRequest): request = compute.InsertNetworkAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1543,8 +1548,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, network_attachment_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1552,10 +1557,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertNetworkAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertNetworkAttachmentRequest): request = compute.InsertNetworkAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1691,8 +1694,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1700,10 +1703,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListNetworkAttachmentsRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListNetworkAttachmentsRequest): request = compute.ListNetworkAttachmentsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1836,8 +1837,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, network_attachment, network_attachment_resource] ) @@ -1847,10 +1848,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchNetworkAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchNetworkAttachmentRequest): request = compute.PatchNetworkAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1979,8 +1978,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, region, network_attachment, network_attachment_resource] ) @@ -1990,10 +1989,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchNetworkAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchNetworkAttachmentRequest): request = compute.PatchNetworkAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2171,8 +2168,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, region_set_policy_request_resource] ) @@ -2182,10 +2179,8 @@ def sample_set_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetIamPolicyNetworkAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetIamPolicyNetworkAttachmentRequest): request = compute.SetIamPolicyNetworkAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2318,8 +2313,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, test_permissions_request_resource] ) @@ -2329,10 +2324,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsNetworkAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.TestIamPermissionsNetworkAttachmentRequest): request = compute.TestIamPermissionsNetworkAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/client.py index 67a08b432ec9..8055d2ee58f3 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_edge_security_services/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -510,7 +511,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, NetworkEdgeSecurityServicesTransport]] = None, + transport: Optional[ + Union[ + str, + NetworkEdgeSecurityServicesTransport, + Callable[..., NetworkEdgeSecurityServicesTransport], + ] + ] = None, client_options: 
Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -522,9 +529,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, NetworkEdgeSecurityServicesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,NetworkEdgeSecurityServicesTransport,Callable[..., NetworkEdgeSecurityServicesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the NetworkEdgeSecurityServicesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -638,8 +647,18 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[NetworkEdgeSecurityServicesTransport], + Callable[..., NetworkEdgeSecurityServicesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast( + Callable[..., NetworkEdgeSecurityServicesTransport], transport + ) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -720,8 +739,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -729,10 +748,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListNetworkEdgeSecurityServicesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.AggregatedListNetworkEdgeSecurityServicesRequest ): @@ -855,8 +872,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, network_edge_security_service]) if request is not None and has_flattened_params: raise ValueError( @@ -864,10 +881,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteNetworkEdgeSecurityServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteNetworkEdgeSecurityServiceRequest): request = compute.DeleteNetworkEdgeSecurityServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -992,8 +1007,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, network_edge_security_service]) if request is not None and has_flattened_params: raise ValueError( @@ -1001,10 +1016,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteNetworkEdgeSecurityServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteNetworkEdgeSecurityServiceRequest): request = compute.DeleteNetworkEdgeSecurityServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1154,8 +1167,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, network_edge_security_service]) if request is not None and has_flattened_params: raise ValueError( @@ -1163,10 +1176,8 @@ def sample_get(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetNetworkEdgeSecurityServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetNetworkEdgeSecurityServiceRequest): request = compute.GetNetworkEdgeSecurityServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1291,8 +1302,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, network_edge_security_service_resource] ) @@ -1302,10 +1313,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertNetworkEdgeSecurityServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertNetworkEdgeSecurityServiceRequest): request = compute.InsertNetworkEdgeSecurityServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1428,8 +1437,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, region, network_edge_security_service_resource] ) @@ -1439,10 +1448,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertNetworkEdgeSecurityServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertNetworkEdgeSecurityServiceRequest): request = compute.InsertNetworkEdgeSecurityServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1599,8 +1606,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1615,10 +1622,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchNetworkEdgeSecurityServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchNetworkEdgeSecurityServiceRequest): request = compute.PatchNetworkEdgeSecurityServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1756,8 +1761,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1772,10 +1777,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchNetworkEdgeSecurityServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchNetworkEdgeSecurityServiceRequest): request = compute.PatchNetworkEdgeSecurityServiceRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py index 05c6c724628a..6c27bc508734 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_endpoint_groups/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -508,7 +509,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, NetworkEndpointGroupsTransport]] = None, + transport: Optional[ + Union[ + str, + NetworkEndpointGroupsTransport, + Callable[..., NetworkEndpointGroupsTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -520,9 +527,11 @@ 
def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, NetworkEndpointGroupsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,NetworkEndpointGroupsTransport,Callable[..., NetworkEndpointGroupsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the NetworkEndpointGroupsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -634,8 +643,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[NetworkEndpointGroupsTransport], + Callable[..., NetworkEndpointGroupsTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., NetworkEndpointGroupsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -713,8 +730,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -722,10 +739,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListNetworkEndpointGroupsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListNetworkEndpointGroupsRequest): request = compute.AggregatedListNetworkEndpointGroupsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -858,8 +873,8 @@ def sample_attach_network_endpoints(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -874,10 +889,8 @@ def sample_attach_network_endpoints(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AttachNetworkEndpointsNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.AttachNetworkEndpointsNetworkEndpointGroupRequest ): @@ -1017,8 +1030,8 @@ def sample_attach_network_endpoints(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1033,10 +1046,8 @@ def sample_attach_network_endpoints(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AttachNetworkEndpointsNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.AttachNetworkEndpointsNetworkEndpointGroupRequest ): @@ -1195,8 +1206,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, network_endpoint_group]) if request is not None and has_flattened_params: raise ValueError( @@ -1204,10 +1215,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteNetworkEndpointGroupRequest): request = compute.DeleteNetworkEndpointGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1335,8 +1344,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, network_endpoint_group]) if request is not None and has_flattened_params: raise ValueError( @@ -1344,10 +1353,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteNetworkEndpointGroupRequest): request = compute.DeleteNetworkEndpointGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1506,8 +1513,8 @@ def sample_detach_network_endpoints(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1522,10 +1529,8 @@ def sample_detach_network_endpoints(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DetachNetworkEndpointsNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance( request, compute.DetachNetworkEndpointsNetworkEndpointGroupRequest ): @@ -1665,8 +1670,8 @@ def sample_detach_network_endpoints(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1681,10 +1686,8 @@ def sample_detach_network_endpoints(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DetachNetworkEndpointsNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.DetachNetworkEndpointsNetworkEndpointGroupRequest ): @@ -1842,8 +1845,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, network_endpoint_group]) if request is not None and has_flattened_params: raise ValueError( @@ -1851,10 +1854,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.GetNetworkEndpointGroupRequest): request = compute.GetNetworkEndpointGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1976,8 +1977,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, network_endpoint_group_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1985,10 +1986,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertNetworkEndpointGroupRequest): request = compute.InsertNetworkEndpointGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2111,8 +2110,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, network_endpoint_group_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2120,10 +2119,8 @@ def sample_insert(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertNetworkEndpointGroupRequest): request = compute.InsertNetworkEndpointGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2264,8 +2261,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone]) if request is not None and has_flattened_params: raise ValueError( @@ -2273,10 +2270,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListNetworkEndpointGroupsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListNetworkEndpointGroupsRequest): request = compute.ListNetworkEndpointGroupsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2418,8 +2413,8 @@ def sample_list_network_endpoints(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2434,10 +2429,8 @@ def sample_list_network_endpoints(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListNetworkEndpointsNetworkEndpointGroupsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.ListNetworkEndpointsNetworkEndpointGroupsRequest ): @@ -2581,8 +2574,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, resource, test_permissions_request_resource] ) @@ -2592,10 +2585,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance( request, compute.TestIamPermissionsNetworkEndpointGroupRequest ): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py index dd3b27cffafe..5b9ffdfd8177 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/network_firewall_policies/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -508,7 +509,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, NetworkFirewallPoliciesTransport]] = None, + transport: Optional[ + Union[ + str, + NetworkFirewallPoliciesTransport, + Callable[..., NetworkFirewallPoliciesTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -520,9 +527,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, NetworkFirewallPoliciesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,NetworkFirewallPoliciesTransport,Callable[..., NetworkFirewallPoliciesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the NetworkFirewallPoliciesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). 
We welcome your feedback via an issue in this library's source repository. @@ -636,8 +645,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[NetworkFirewallPoliciesTransport], + Callable[..., NetworkFirewallPoliciesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., NetworkFirewallPoliciesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -729,8 +746,8 @@ def sample_add_association(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, firewall_policy, firewall_policy_association_resource] ) @@ -740,10 +757,8 @@ def sample_add_association(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddAssociationNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddAssociationNetworkFirewallPolicyRequest): request = compute.AddAssociationNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -866,8 +881,8 @@ def sample_add_association(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, firewall_policy, firewall_policy_association_resource] ) @@ -877,10 +892,8 @@ def sample_add_association(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddAssociationNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddAssociationNetworkFirewallPolicyRequest): request = compute.AddAssociationNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1024,8 +1037,8 @@ def sample_add_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, firewall_policy, firewall_policy_rule_resource] ) @@ -1035,10 +1048,8 @@ def sample_add_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddRuleNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.AddRuleNetworkFirewallPolicyRequest): request = compute.AddRuleNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1156,8 +1167,8 @@ def sample_add_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, firewall_policy, firewall_policy_rule_resource] ) @@ -1167,10 +1178,8 @@ def sample_add_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddRuleNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddRuleNetworkFirewallPolicyRequest): request = compute.AddRuleNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1306,8 +1315,8 @@ def sample_clone_rules(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1315,10 +1324,8 @@ def sample_clone_rules(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.CloneRulesNetworkFirewallPolicyRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.CloneRulesNetworkFirewallPolicyRequest): request = compute.CloneRulesNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1428,8 +1435,8 @@ def sample_clone_rules(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1437,10 +1444,8 @@ def sample_clone_rules(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.CloneRulesNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.CloneRulesNetworkFirewallPolicyRequest): request = compute.CloneRulesNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1574,8 +1579,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1583,10 +1588,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteNetworkFirewallPolicyRequest): request = compute.DeleteNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1696,8 +1699,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1705,10 +1708,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteNetworkFirewallPolicyRequest): request = compute.DeleteNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1838,8 +1839,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1847,10 +1848,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetNetworkFirewallPolicyRequest): request = compute.GetNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1958,8 +1957,8 @@ def sample_get_association(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1967,10 +1966,8 @@ def sample_get_association(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetAssociationNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.GetAssociationNetworkFirewallPolicyRequest): request = compute.GetAssociationNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2101,8 +2098,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2110,10 +2107,8 @@ def sample_get_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetIamPolicyNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetIamPolicyNetworkFirewallPolicyRequest): request = compute.GetIamPolicyNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2225,8 +2220,8 @@ def sample_get_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -2234,10 +2229,8 @@ def sample_get_rule(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRuleNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRuleNetworkFirewallPolicyRequest): request = compute.GetRuleNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2345,8 +2338,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, firewall_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2354,10 +2347,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertNetworkFirewallPolicyRequest): request = compute.InsertNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2460,8 +2451,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, firewall_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2469,10 +2460,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertNetworkFirewallPolicyRequest): request = compute.InsertNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2595,8 +2584,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -2604,10 +2593,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListNetworkFirewallPoliciesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ListNetworkFirewallPoliciesRequest): request = compute.ListNetworkFirewallPoliciesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2726,8 +2713,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, firewall_policy, firewall_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2735,10 +2722,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchNetworkFirewallPolicyRequest): request = compute.PatchNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2857,8 +2842,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, firewall_policy, firewall_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2866,10 +2851,8 @@ def sample_patch(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchNetworkFirewallPolicyRequest): request = compute.PatchNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3011,8 +2994,8 @@ def sample_patch_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, firewall_policy, firewall_policy_rule_resource] ) @@ -3022,10 +3005,8 @@ def sample_patch_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRuleNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRuleNetworkFirewallPolicyRequest): request = compute.PatchRuleNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3143,8 +3124,8 @@ def sample_patch_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, firewall_policy, firewall_policy_rule_resource] ) @@ -3154,10 +3135,8 @@ def sample_patch_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRuleNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRuleNetworkFirewallPolicyRequest): request = compute.PatchRuleNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3294,8 +3273,8 @@ def sample_remove_association(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -3303,10 +3282,8 @@ def sample_remove_association(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveAssociationNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.RemoveAssociationNetworkFirewallPolicyRequest ): @@ -3419,8 +3396,8 @@ def sample_remove_association(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -3428,10 +3405,8 @@ def sample_remove_association(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveAssociationNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.RemoveAssociationNetworkFirewallPolicyRequest ): @@ -3567,8 +3542,8 @@ def sample_remove_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -3576,10 +3551,8 @@ def sample_remove_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveRuleNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.RemoveRuleNetworkFirewallPolicyRequest): request = compute.RemoveRuleNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3689,8 +3662,8 @@ def sample_remove_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -3698,10 +3671,8 @@ def sample_remove_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveRuleNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.RemoveRuleNetworkFirewallPolicyRequest): request = compute.RemoveRuleNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3864,8 +3835,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, global_set_policy_request_resource] ) @@ -3875,10 +3846,8 @@ def sample_set_iam_policy(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetIamPolicyNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetIamPolicyNetworkFirewallPolicyRequest): request = compute.SetIamPolicyNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3999,8 +3968,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, test_permissions_request_resource] ) @@ -4010,10 +3979,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance( request, compute.TestIamPermissionsNetworkFirewallPolicyRequest ): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/client.py index d649b9920c7c..64f79349fabe 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/networks/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, NetworksTransport]] = None, + transport: Optional[ + Union[str, NetworksTransport, Callable[..., NetworksTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, NetworksTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,NetworksTransport,Callable[..., NetworksTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the NetworksTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. 
@@ -627,8 +632,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[NetworksTransport], Callable[..., NetworksTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., NetworksTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -717,8 +729,8 @@ def sample_add_peering(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, network, networks_add_peering_request_resource] ) @@ -728,10 +740,8 @@ def sample_add_peering(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddPeeringNetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddPeeringNetworkRequest): request = compute.AddPeeringNetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -851,8 +861,8 @@ def sample_add_peering(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, network, networks_add_peering_request_resource] ) @@ -862,10 +872,8 @@ def sample_add_peering(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddPeeringNetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddPeeringNetworkRequest): request = compute.AddPeeringNetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -999,8 +1007,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, network]) if request is not None and has_flattened_params: raise ValueError( @@ -1008,10 +1016,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteNetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteNetworkRequest): request = compute.DeleteNetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1117,8 +1123,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, network]) if request is not None and has_flattened_params: raise ValueError( @@ -1126,10 +1132,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteNetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteNetworkRequest): request = compute.DeleteNetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1261,8 +1265,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, network]) if request is not None and has_flattened_params: raise ValueError( @@ -1270,10 +1274,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetNetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.GetNetworkRequest): request = compute.GetNetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1379,8 +1381,8 @@ def sample_get_effective_firewalls(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, network]) if request is not None and has_flattened_params: raise ValueError( @@ -1388,10 +1390,8 @@ def sample_get_effective_firewalls(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetEffectiveFirewallsNetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetEffectiveFirewallsNetworkRequest): request = compute.GetEffectiveFirewallsNetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1497,8 +1497,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, network_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1506,10 +1506,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertNetworkRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertNetworkRequest): request = compute.InsertNetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1610,8 +1608,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, network_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1619,10 +1617,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertNetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertNetworkRequest): request = compute.InsertNetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1744,8 +1740,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1753,10 +1749,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListNetworksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListNetworksRequest): request = compute.ListNetworksRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1867,8 +1861,8 @@ def sample_list_peering_routes(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, network]) if request is not None and has_flattened_params: raise ValueError( @@ -1876,10 +1870,8 @@ def sample_list_peering_routes(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListPeeringRoutesNetworksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListPeeringRoutesNetworksRequest): request = compute.ListPeeringRoutesNetworksRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2001,8 +1993,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, network, network_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2010,10 +2002,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchNetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchNetworkRequest): request = compute.PatchNetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2128,8 +2118,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, network, network_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2137,10 +2127,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchNetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.PatchNetworkRequest): request = compute.PatchNetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2282,8 +2270,8 @@ def sample_remove_peering(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, network, networks_remove_peering_request_resource] ) @@ -2293,10 +2281,8 @@ def sample_remove_peering(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemovePeeringNetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.RemovePeeringNetworkRequest): request = compute.RemovePeeringNetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2416,8 +2402,8 @@ def sample_remove_peering(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, network, networks_remove_peering_request_resource] ) @@ -2427,10 +2413,8 @@ def sample_remove_peering(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemovePeeringNetworkRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.RemovePeeringNetworkRequest): request = compute.RemovePeeringNetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2565,8 +2549,8 @@ def sample_switch_to_custom_mode(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, network]) if request is not None and has_flattened_params: raise ValueError( @@ -2574,10 +2558,8 @@ def sample_switch_to_custom_mode(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SwitchToCustomModeNetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SwitchToCustomModeNetworkRequest): request = compute.SwitchToCustomModeNetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2684,8 +2666,8 @@ def sample_switch_to_custom_mode(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, network]) if request is not None and has_flattened_params: raise ValueError( @@ -2693,10 +2675,8 @@ def sample_switch_to_custom_mode(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SwitchToCustomModeNetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SwitchToCustomModeNetworkRequest): request = compute.SwitchToCustomModeNetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2839,8 +2819,8 @@ def sample_update_peering(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, network, networks_update_peering_request_resource] ) @@ -2850,10 +2830,8 @@ def sample_update_peering(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdatePeeringNetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdatePeeringNetworkRequest): request = compute.UpdatePeeringNetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2976,8 +2954,8 @@ def sample_update_peering(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, network, networks_update_peering_request_resource] ) @@ -2987,10 +2965,8 @@ def sample_update_peering(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdatePeeringNetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdatePeeringNetworkRequest): request = compute.UpdatePeeringNetworkRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py index a90756315dfd..fccb15bf9728 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, NodeGroupsTransport]] = None, + transport: Optional[ + Union[str, NodeGroupsTransport, Callable[..., NodeGroupsTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; 
if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, NodeGroupsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,NodeGroupsTransport,Callable[..., NodeGroupsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the NodeGroupsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -627,8 +632,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[NodeGroupsTransport], Callable[..., NodeGroupsTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., NodeGroupsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -724,8 +736,8 @@ def sample_add_nodes(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, node_group, node_groups_add_nodes_request_resource] ) @@ -735,10 +747,8 @@ def sample_add_nodes(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddNodesNodeGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddNodesNodeGroupRequest): request = compute.AddNodesNodeGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -868,8 +878,8 @@ def sample_add_nodes(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, node_group, node_groups_add_nodes_request_resource] ) @@ -879,10 +889,8 @@ def sample_add_nodes(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddNodesNodeGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddNodesNodeGroupRequest): request = compute.AddNodesNodeGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1018,8 +1026,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1027,10 +1035,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListNodeGroupsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListNodeGroupsRequest): request = compute.AggregatedListNodeGroupsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1149,8 +1155,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, node_group]) if request is not None and has_flattened_params: raise ValueError( @@ -1158,10 +1164,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteNodeGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteNodeGroupRequest): request = compute.DeleteNodeGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1281,8 +1285,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, node_group]) if request is not None and has_flattened_params: raise ValueError( @@ -1290,10 +1294,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteNodeGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteNodeGroupRequest): request = compute.DeleteNodeGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1446,8 +1448,8 @@ def sample_delete_nodes(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, node_group, node_groups_delete_nodes_request_resource] ) @@ -1457,10 +1459,8 @@ def sample_delete_nodes(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteNodesNodeGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteNodesNodeGroupRequest): request = compute.DeleteNodesNodeGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1592,8 +1592,8 @@ def sample_delete_nodes(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, node_group, node_groups_delete_nodes_request_resource] ) @@ -1603,10 +1603,8 @@ def sample_delete_nodes(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteNodesNodeGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteNodesNodeGroupRequest): request = compute.DeleteNodesNodeGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1764,8 +1762,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, node_group]) if request is not None and has_flattened_params: raise ValueError( @@ -1773,10 +1771,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetNodeGroupRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetNodeGroupRequest): request = compute.GetNodeGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1917,8 +1913,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1926,10 +1922,8 @@ def sample_get_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetIamPolicyNodeGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetIamPolicyNodeGroupRequest): request = compute.GetIamPolicyNodeGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2056,8 +2050,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, zone, initial_node_count, node_group_resource] ) @@ -2067,10 +2061,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertNodeGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertNodeGroupRequest): request = compute.InsertNodeGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2198,8 +2190,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, initial_node_count, node_group_resource] ) @@ -2209,10 +2201,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertNodeGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertNodeGroupRequest): request = compute.InsertNodeGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2355,8 +2345,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone]) if request is not None and has_flattened_params: raise ValueError( @@ -2364,10 +2354,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListNodeGroupsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListNodeGroupsRequest): request = compute.ListNodeGroupsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2495,8 +2483,8 @@ def sample_list_nodes(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, node_group]) if request is not None and has_flattened_params: raise ValueError( @@ -2504,10 +2492,8 @@ def sample_list_nodes(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListNodesNodeGroupsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ListNodesNodeGroupsRequest): request = compute.ListNodesNodeGroupsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2642,8 +2628,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, node_group, node_group_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2651,10 +2637,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchNodeGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchNodeGroupRequest): request = compute.PatchNodeGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2782,8 +2766,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, node_group, node_group_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2791,10 +2775,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchNodeGroupRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchNodeGroupRequest): request = compute.PatchNodeGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2863,6 +2845,335 @@ def error_code(self): # Done; return the response. return response + def perform_maintenance_unary( + self, + request: Optional[ + Union[compute.PerformMaintenanceNodeGroupRequest, dict] + ] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + node_group: Optional[str] = None, + node_groups_perform_maintenance_request_resource: Optional[ + compute.NodeGroupsPerformMaintenanceRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Perform maintenance on a subset of nodes in the node + group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_perform_maintenance(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.PerformMaintenanceNodeGroupRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.perform_maintenance(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PerformMaintenanceNodeGroupRequest, dict]): + The request object. A request message for + NodeGroups.PerformMaintenance. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_group (str): + Name of the node group scoping this + request. + + This corresponds to the ``node_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_groups_perform_maintenance_request_resource (google.cloud.compute_v1.types.NodeGroupsPerformMaintenanceRequest): + The body resource for this request + This corresponds to the ``node_groups_perform_maintenance_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [ + project, + zone, + node_group, + node_groups_perform_maintenance_request_resource, + ] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.PerformMaintenanceNodeGroupRequest): + request = compute.PerformMaintenanceNodeGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if node_group is not None: + request.node_group = node_group + if node_groups_perform_maintenance_request_resource is not None: + request.node_groups_perform_maintenance_request_resource = ( + node_groups_perform_maintenance_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.perform_maintenance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("node_group", request.node_group), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def perform_maintenance( + self, + request: Optional[ + Union[compute.PerformMaintenanceNodeGroupRequest, dict] + ] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + node_group: Optional[str] = None, + node_groups_perform_maintenance_request_resource: Optional[ + compute.NodeGroupsPerformMaintenanceRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Perform maintenance on a subset of nodes in the node + group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_perform_maintenance(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.PerformMaintenanceNodeGroupRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.perform_maintenance(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PerformMaintenanceNodeGroupRequest, dict]): + The request object. A request message for + NodeGroups.PerformMaintenance. See the + method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_group (str): + Name of the node group scoping this + request. + + This corresponds to the ``node_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + node_groups_perform_maintenance_request_resource (google.cloud.compute_v1.types.NodeGroupsPerformMaintenanceRequest): + The body resource for this request + This corresponds to the ``node_groups_perform_maintenance_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [ + project, + zone, + node_group, + node_groups_perform_maintenance_request_resource, + ] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, compute.PerformMaintenanceNodeGroupRequest): + request = compute.PerformMaintenanceNodeGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if node_group is not None: + request.node_group = node_group + if node_groups_perform_maintenance_request_resource is not None: + request.node_groups_perform_maintenance_request_resource = ( + node_groups_perform_maintenance_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.perform_maintenance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("node_group", request.node_group), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + def set_iam_policy( self, request: Optional[Union[compute.SetIamPolicyNodeGroupRequest, dict]] = None, @@ -2968,8 +3279,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, resource, zone_set_policy_request_resource] ) @@ -2979,10 +3290,8 @@ def sample_set_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetIamPolicyNodeGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetIamPolicyNodeGroupRequest): request = compute.SetIamPolicyNodeGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3114,8 +3423,8 @@ def sample_set_node_template(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, zone, node_group, node_groups_set_node_template_request_resource] ) @@ -3125,10 +3434,8 @@ def sample_set_node_template(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetNodeTemplateNodeGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetNodeTemplateNodeGroupRequest): request = compute.SetNodeTemplateNodeGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3260,8 +3567,8 @@ def sample_set_node_template(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, node_group, node_groups_set_node_template_request_resource] ) @@ -3271,10 +3578,8 @@ def sample_set_node_template(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetNodeTemplateNodeGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetNodeTemplateNodeGroupRequest): request = compute.SetNodeTemplateNodeGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3435,8 +3740,8 @@ def sample_simulate_maintenance_event(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -3451,10 +3756,8 @@ def sample_simulate_maintenance_event(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SimulateMaintenanceEventNodeGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SimulateMaintenanceEventNodeGroupRequest): request = compute.SimulateMaintenanceEventNodeGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3592,8 +3895,8 @@ def sample_simulate_maintenance_event(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -3608,10 +3911,8 @@ def sample_simulate_maintenance_event(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SimulateMaintenanceEventNodeGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SimulateMaintenanceEventNodeGroupRequest): request = compute.SimulateMaintenanceEventNodeGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3771,8 +4072,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, resource, test_permissions_request_resource] ) @@ -3782,10 +4083,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsNodeGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.TestIamPermissionsNodeGroupRequest): request = compute.TestIamPermissionsNodeGroupRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/transports/base.py index 228adb13caff..229a406ee164 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/transports/base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/transports/base.py @@ -181,6 +181,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.perform_maintenance: gapic_v1.method.wrap_method( + self.perform_maintenance, + default_timeout=None, + client_info=client_info, + ), self.set_iam_policy: gapic_v1.method.wrap_method( self.set_iam_policy, default_timeout=None, @@ -304,6 +309,15 @@ def patch( ]: raise NotImplementedError() + @property + def perform_maintenance( + self, + ) -> Callable[ + [compute.PerformMaintenanceNodeGroupRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + @property def set_iam_policy( self, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/transports/rest.py index 5fea2d7dd6d1..eb1c484aabe0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_groups/transports/rest.py @@ -143,6 +143,14 @@ def post_patch(self, response): logging.log(f"Received response: {response}") return response + def pre_perform_maintenance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def 
post_perform_maintenance(self, response): + logging.log(f"Received response: {response}") + return response + def pre_set_iam_policy(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -393,6 +401,29 @@ def post_patch(self, response: compute.Operation) -> compute.Operation: """ return response + def pre_perform_maintenance( + self, + request: compute.PerformMaintenanceNodeGroupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.PerformMaintenanceNodeGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for perform_maintenance + + Override in a subclass to manipulate the request or metadata + before they are sent to the NodeGroups server. + """ + return request, metadata + + def post_perform_maintenance( + self, response: compute.Operation + ) -> compute.Operation: + """Post-rpc interceptor for perform_maintenance + + Override in a subclass to manipulate the response + after it is returned by the NodeGroups server but before + it is returned to user code. + """ + return response + def pre_set_iam_policy( self, request: compute.SetIamPolicyNodeGroupRequest, @@ -1585,6 +1616,117 @@ def __call__( resp = self._interceptor.post_patch(resp) return resp + class _PerformMaintenance(NodeGroupsRestStub): + def __hash__(self): + return hash("PerformMaintenance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.PerformMaintenanceNodeGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Call the perform maintenance method over HTTP. + + Args: + request (~.compute.PerformMaintenanceNodeGroupRequest): + The request object. 
A request message for + NodeGroups.PerformMaintenance. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zoneOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. + Note that completed Operation resources have a limited + retention period. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/performMaintenance", + "body": "node_groups_perform_maintenance_request_resource", + }, + ] + request, metadata = self._interceptor.pre_perform_maintenance( + request, metadata + ) + pb_request = compute.PerformMaintenanceNodeGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_perform_maintenance(resp) + return resp + class _SetIamPolicy(NodeGroupsRestStub): def __hash__(self): return hash("SetIamPolicy") @@ -2089,6 +2231,14 @@ def patch(self) -> Callable[[compute.PatchNodeGroupRequest], compute.Operation]: # In C++ this would require a dynamic_cast return self._Patch(self._session, self._host, self._interceptor) # type: ignore + @property + def perform_maintenance( + self, + ) -> Callable[[compute.PerformMaintenanceNodeGroupRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._PerformMaintenance(self._session, self._host, self._interceptor) # type: ignore + @property def set_iam_policy( self, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py index 90f577330a3b..29f307b18bfd 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_templates/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, NodeTemplatesTransport]] = None, + transport: Optional[ + Union[str, NodeTemplatesTransport, Callable[..., NodeTemplatesTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = 
DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, NodeTemplatesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,NodeTemplatesTransport,Callable[..., NodeTemplatesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the NodeTemplatesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -630,8 +635,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[NodeTemplatesTransport], Callable[..., NodeTemplatesTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., NodeTemplatesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -709,8 +721,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -718,10 +730,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListNodeTemplatesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListNodeTemplatesRequest): request = compute.AggregatedListNodeTemplatesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -840,8 +850,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, node_template]) if request is not None and has_flattened_params: raise ValueError( @@ -849,10 +859,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteNodeTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteNodeTemplateRequest): request = compute.DeleteNodeTemplateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -972,8 +980,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, node_template]) if request is not None and has_flattened_params: raise ValueError( @@ -981,10 +989,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteNodeTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteNodeTemplateRequest): request = compute.DeleteNodeTemplateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1130,8 +1136,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, node_template]) if request is not None and has_flattened_params: raise ValueError( @@ -1139,10 +1145,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetNodeTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.GetNodeTemplateRequest): request = compute.GetNodeTemplateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1283,8 +1287,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1292,10 +1296,8 @@ def sample_get_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetIamPolicyNodeTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetIamPolicyNodeTemplateRequest): request = compute.GetIamPolicyNodeTemplateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1413,8 +1415,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, node_template_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1422,10 +1424,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertNodeTemplateRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertNodeTemplateRequest): request = compute.InsertNodeTemplateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1542,8 +1542,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, node_template_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1551,10 +1551,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertNodeTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertNodeTemplateRequest): request = compute.InsertNodeTemplateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1694,8 +1692,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1703,10 +1701,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListNodeTemplatesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListNodeTemplatesRequest): request = compute.ListNodeTemplatesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1861,8 +1857,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, region_set_policy_request_resource] ) @@ -1872,10 +1868,8 @@ def sample_set_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetIamPolicyNodeTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetIamPolicyNodeTemplateRequest): request = compute.SetIamPolicyNodeTemplateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2008,8 +2002,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, test_permissions_request_resource] ) @@ -2019,10 +2013,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsNodeTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.TestIamPermissionsNodeTemplateRequest): request = compute.TestIamPermissionsNodeTemplateRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/client.py index 29facf60746c..241af0c30697 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/node_types/client.py @@ -17,6 +17,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -501,7 +502,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, NodeTypesTransport]] = None, + transport: Optional[ + Union[str, NodeTypesTransport, Callable[..., NodeTypesTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -513,9 +516,11 @@ def __init__( credentials identify 
the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, NodeTypesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,NodeTypesTransport,Callable[..., NodeTypesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the NodeTypesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -624,8 +629,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[NodeTypesTransport], Callable[..., NodeTypesTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., NodeTypesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -701,8 +713,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -710,10 +722,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListNodeTypesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListNodeTypesRequest): request = compute.AggregatedListNodeTypesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -837,8 +847,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, node_type]) if request is not None and has_flattened_params: raise ValueError( @@ -846,10 +856,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetNodeTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetNodeTypeRequest): request = compute.GetNodeTypeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -964,8 +972,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, zone]) if request is not None and has_flattened_params: raise ValueError( @@ -973,10 +981,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListNodeTypesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListNodeTypesRequest): request = compute.ListNodeTypesRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/client.py index 444021b052f7..a5fd94f51b3a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/packet_mirrorings/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, PacketMirroringsTransport]] = None, + transport: Optional[ + Union[ + str, PacketMirroringsTransport, Callable[..., PacketMirroringsTransport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +523,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, PacketMirroringsTransport]): The - transport to use. 
If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,PacketMirroringsTransport,Callable[..., PacketMirroringsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the PacketMirroringsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -632,8 +639,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[PacketMirroringsTransport], + Callable[..., PacketMirroringsTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., PacketMirroringsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -713,8 +728,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -722,10 +737,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListPacketMirroringsRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListPacketMirroringsRequest): request = compute.AggregatedListPacketMirroringsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -842,8 +855,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, packet_mirroring]) if request is not None and has_flattened_params: raise ValueError( @@ -851,10 +864,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeletePacketMirroringRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeletePacketMirroringRequest): request = compute.DeletePacketMirroringRequest(request) # If we have keyword arguments corresponding to fields on the @@ -972,8 +983,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, packet_mirroring]) if request is not None and has_flattened_params: raise ValueError( @@ -981,10 +992,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeletePacketMirroringRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeletePacketMirroringRequest): request = compute.DeletePacketMirroringRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1135,8 +1144,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, packet_mirroring]) if request is not None and has_flattened_params: raise ValueError( @@ -1144,10 +1153,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetPacketMirroringRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetPacketMirroringRequest): request = compute.GetPacketMirroringRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1264,8 +1271,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, packet_mirroring_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1273,10 +1280,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertPacketMirroringRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertPacketMirroringRequest): request = compute.InsertPacketMirroringRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1392,8 +1397,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, packet_mirroring_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1401,10 +1406,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertPacketMirroringRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.InsertPacketMirroringRequest): request = compute.InsertPacketMirroringRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1542,8 +1545,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1551,10 +1554,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListPacketMirroringsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListPacketMirroringsRequest): request = compute.ListPacketMirroringsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1687,8 +1688,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, packet_mirroring, packet_mirroring_resource] ) @@ -1698,10 +1699,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchPacketMirroringRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchPacketMirroringRequest): request = compute.PatchPacketMirroringRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1830,8 +1829,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, packet_mirroring, packet_mirroring_resource] ) @@ -1841,10 +1840,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchPacketMirroringRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchPacketMirroringRequest): request = compute.PatchPacketMirroringRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2000,8 +1997,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, region, resource, test_permissions_request_resource] ) @@ -2011,10 +2008,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsPacketMirroringRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.TestIamPermissionsPacketMirroringRequest): request = compute.TestIamPermissionsPacketMirroringRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py index 9edec43ab36e..c035ab4a42cc 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ProjectsTransport]] = None, + transport: Optional[ + Union[str, ProjectsTransport, Callable[..., ProjectsTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ProjectsTransport]): The - transport to use. 
If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,ProjectsTransport,Callable[..., ProjectsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ProjectsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -627,8 +632,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[ProjectsTransport], Callable[..., ProjectsTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ProjectsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -700,8 +712,8 @@ def sample_disable_xpn_host(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -709,10 +721,8 @@ def sample_disable_xpn_host(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DisableXpnHostProjectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DisableXpnHostProjectRequest): request = compute.DisableXpnHostProjectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -804,8 +814,8 @@ def sample_disable_xpn_host(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -813,10 +823,8 @@ def sample_disable_xpn_host(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DisableXpnHostProjectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DisableXpnHostProjectRequest): request = compute.DisableXpnHostProjectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -941,8 +949,8 @@ def sample_disable_xpn_resource(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, projects_disable_xpn_resource_request_resource] ) @@ -952,10 +960,8 @@ def sample_disable_xpn_resource(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DisableXpnResourceProjectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DisableXpnResourceProjectRequest): request = compute.DisableXpnResourceProjectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1060,8 +1066,8 @@ def sample_disable_xpn_resource(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, projects_disable_xpn_resource_request_resource] ) @@ -1071,10 +1077,8 @@ def sample_disable_xpn_resource(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DisableXpnResourceProjectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DisableXpnResourceProjectRequest): request = compute.DisableXpnResourceProjectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1194,8 +1198,8 @@ def sample_enable_xpn_host(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1203,10 +1207,8 @@ def sample_enable_xpn_host(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.EnableXpnHostProjectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.EnableXpnHostProjectRequest): request = compute.EnableXpnHostProjectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1298,8 +1300,8 @@ def sample_enable_xpn_host(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1307,10 +1309,8 @@ def sample_enable_xpn_host(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.EnableXpnHostProjectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.EnableXpnHostProjectRequest): request = compute.EnableXpnHostProjectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1436,8 +1436,8 @@ def sample_enable_xpn_resource(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, projects_enable_xpn_resource_request_resource] ) @@ -1447,10 +1447,8 @@ def sample_enable_xpn_resource(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.EnableXpnResourceProjectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.EnableXpnResourceProjectRequest): request = compute.EnableXpnResourceProjectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1556,8 +1554,8 @@ def sample_enable_xpn_resource(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, projects_enable_xpn_resource_request_resource] ) @@ -1567,10 +1565,8 @@ def sample_enable_xpn_resource(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.EnableXpnResourceProjectRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.EnableXpnResourceProjectRequest): request = compute.EnableXpnResourceProjectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1700,8 +1696,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1709,10 +1705,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetProjectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetProjectRequest): request = compute.GetProjectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1808,8 +1802,8 @@ def sample_get_xpn_host(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1817,10 +1811,8 @@ def sample_get_xpn_host(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetXpnHostProjectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetXpnHostProjectRequest): request = compute.GetXpnHostProjectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1915,8 +1907,8 @@ def sample_get_xpn_resources(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1924,10 +1916,8 @@ def sample_get_xpn_resources(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetXpnResourcesProjectsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetXpnResourcesProjectsRequest): request = compute.GetXpnResourcesProjectsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2039,8 +2029,8 @@ def sample_list_xpn_hosts(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, projects_list_xpn_hosts_request_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2048,10 +2038,8 @@ def sample_list_xpn_hosts(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListXpnHostsProjectsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListXpnHostsProjectsRequest): request = compute.ListXpnHostsProjectsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2162,8 +2150,8 @@ def sample_move_disk(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, disk_move_request_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2171,10 +2159,8 @@ def sample_move_disk(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.MoveDiskProjectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.MoveDiskProjectRequest): request = compute.MoveDiskProjectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2274,8 +2260,8 @@ def sample_move_disk(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, disk_move_request_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2283,10 +2269,8 @@ def sample_move_disk(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.MoveDiskProjectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.MoveDiskProjectRequest): request = compute.MoveDiskProjectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2418,8 +2402,8 @@ def sample_move_instance(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, instance_move_request_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2427,10 +2411,8 @@ def sample_move_instance(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.MoveInstanceProjectRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.MoveInstanceProjectRequest): request = compute.MoveInstanceProjectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2538,8 +2520,8 @@ def sample_move_instance(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, instance_move_request_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2547,10 +2529,8 @@ def sample_move_instance(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.MoveInstanceProjectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.MoveInstanceProjectRequest): request = compute.MoveInstanceProjectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2680,8 +2660,8 @@ def sample_set_cloud_armor_tier(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, projects_set_cloud_armor_tier_request_resource] ) @@ -2691,10 +2671,8 @@ def sample_set_cloud_armor_tier(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetCloudArmorTierProjectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetCloudArmorTierProjectRequest): request = compute.SetCloudArmorTierProjectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2802,8 +2780,8 @@ def sample_set_cloud_armor_tier(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, projects_set_cloud_armor_tier_request_resource] ) @@ -2813,10 +2791,8 @@ def sample_set_cloud_armor_tier(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetCloudArmorTierProjectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetCloudArmorTierProjectRequest): request = compute.SetCloudArmorTierProjectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2946,8 +2922,8 @@ def sample_set_common_instance_metadata(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, metadata_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2955,10 +2931,8 @@ def sample_set_common_instance_metadata(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetCommonInstanceMetadataProjectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetCommonInstanceMetadataProjectRequest): request = compute.SetCommonInstanceMetadataProjectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3064,8 +3038,8 @@ def sample_set_common_instance_metadata(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, metadata_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -3073,10 +3047,8 @@ def sample_set_common_instance_metadata(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetCommonInstanceMetadataProjectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetCommonInstanceMetadataProjectRequest): request = compute.SetCommonInstanceMetadataProjectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3209,8 +3181,8 @@ def sample_set_default_network_tier(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, projects_set_default_network_tier_request_resource] ) @@ -3220,10 +3192,8 @@ def sample_set_default_network_tier(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetDefaultNetworkTierProjectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetDefaultNetworkTierProjectRequest): request = compute.SetDefaultNetworkTierProjectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3332,8 +3302,8 @@ def sample_set_default_network_tier(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, projects_set_default_network_tier_request_resource] ) @@ -3343,10 +3313,8 @@ def sample_set_default_network_tier(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetDefaultNetworkTierProjectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetDefaultNetworkTierProjectRequest): request = compute.SetDefaultNetworkTierProjectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3477,8 +3445,8 @@ def sample_set_usage_export_bucket(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, usage_export_location_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -3486,10 +3454,8 @@ def sample_set_usage_export_bucket(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetUsageExportBucketProjectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetUsageExportBucketProjectRequest): request = compute.SetUsageExportBucketProjectRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3594,8 +3560,8 @@ def sample_set_usage_export_bucket(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, usage_export_location_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -3603,10 +3569,8 @@ def sample_set_usage_export_bucket(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetUsageExportBucketProjectRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetUsageExportBucketProjectRequest): request = compute.SetUsageExportBucketProjectRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py index 60825e46a80e..588f02f8f1d7 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_advertised_prefixes/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -508,7 +509,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, PublicAdvertisedPrefixesTransport]] = None, + transport: Optional[ + Union[ + str, + PublicAdvertisedPrefixesTransport, + Callable[..., PublicAdvertisedPrefixesTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -520,9 +527,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, PublicAdvertisedPrefixesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,PublicAdvertisedPrefixesTransport,Callable[..., PublicAdvertisedPrefixesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the PublicAdvertisedPrefixesTransport constructor. + If set to None, a transport is chosen automatically. 
NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -636,8 +645,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[PublicAdvertisedPrefixesTransport], + Callable[..., PublicAdvertisedPrefixesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., PublicAdvertisedPrefixesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -720,8 +737,8 @@ def sample_announce(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, public_advertised_prefix]) if request is not None and has_flattened_params: raise ValueError( @@ -729,10 +746,8 @@ def sample_announce(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AnnouncePublicAdvertisedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.AnnouncePublicAdvertisedPrefixeRequest): request = compute.AnnouncePublicAdvertisedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -842,8 +857,8 @@ def sample_announce(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, public_advertised_prefix]) if request is not None and has_flattened_params: raise ValueError( @@ -851,10 +866,8 @@ def sample_announce(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AnnouncePublicAdvertisedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AnnouncePublicAdvertisedPrefixeRequest): request = compute.AnnouncePublicAdvertisedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -988,8 +1001,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, public_advertised_prefix]) if request is not None and has_flattened_params: raise ValueError( @@ -997,10 +1010,8 @@ def sample_delete(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeletePublicAdvertisedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeletePublicAdvertisedPrefixeRequest): request = compute.DeletePublicAdvertisedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1110,8 +1121,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, public_advertised_prefix]) if request is not None and has_flattened_params: raise ValueError( @@ -1119,10 +1130,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeletePublicAdvertisedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeletePublicAdvertisedPrefixeRequest): request = compute.DeletePublicAdvertisedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1261,8 +1270,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, public_advertised_prefix]) if request is not None and has_flattened_params: raise ValueError( @@ -1270,10 +1279,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetPublicAdvertisedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetPublicAdvertisedPrefixeRequest): request = compute.GetPublicAdvertisedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1384,8 +1391,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, public_advertised_prefix_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1393,10 +1400,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertPublicAdvertisedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.InsertPublicAdvertisedPrefixeRequest): request = compute.InsertPublicAdvertisedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1504,8 +1509,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, public_advertised_prefix_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1513,10 +1518,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertPublicAdvertisedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertPublicAdvertisedPrefixeRequest): request = compute.InsertPublicAdvertisedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1640,8 +1643,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1649,10 +1652,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListPublicAdvertisedPrefixesRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListPublicAdvertisedPrefixesRequest): request = compute.ListPublicAdvertisedPrefixesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1775,8 +1776,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, public_advertised_prefix, public_advertised_prefix_resource] ) @@ -1786,10 +1787,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchPublicAdvertisedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchPublicAdvertisedPrefixeRequest): request = compute.PatchPublicAdvertisedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1914,8 +1913,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, public_advertised_prefix, public_advertised_prefix_resource] ) @@ -1925,10 +1924,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchPublicAdvertisedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchPublicAdvertisedPrefixeRequest): request = compute.PatchPublicAdvertisedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2066,8 +2063,8 @@ def sample_withdraw(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, public_advertised_prefix]) if request is not None and has_flattened_params: raise ValueError( @@ -2075,10 +2072,8 @@ def sample_withdraw(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.WithdrawPublicAdvertisedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.WithdrawPublicAdvertisedPrefixeRequest): request = compute.WithdrawPublicAdvertisedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2188,8 +2183,8 @@ def sample_withdraw(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, public_advertised_prefix]) if request is not None and has_flattened_params: raise ValueError( @@ -2197,10 +2192,8 @@ def sample_withdraw(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.WithdrawPublicAdvertisedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.WithdrawPublicAdvertisedPrefixeRequest): request = compute.WithdrawPublicAdvertisedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py index 2579dac01b57..16efee253477 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/public_delegated_prefixes/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -508,7 +509,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, PublicDelegatedPrefixesTransport]] = None, + transport: Optional[ + Union[ + str, + PublicDelegatedPrefixesTransport, + Callable[..., PublicDelegatedPrefixesTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] 
= None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -520,9 +527,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, PublicDelegatedPrefixesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,PublicDelegatedPrefixesTransport,Callable[..., PublicDelegatedPrefixesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the PublicDelegatedPrefixesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -636,8 +645,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[PublicDelegatedPrefixesTransport], + Callable[..., PublicDelegatedPrefixesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., PublicDelegatedPrefixesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -717,8 +734,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -726,10 +743,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListPublicDelegatedPrefixesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.AggregatedListPublicDelegatedPrefixesRequest ): @@ -854,8 +869,8 @@ def sample_announce(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, public_delegated_prefix]) if request is not None and has_flattened_params: raise ValueError( @@ -863,10 +878,8 @@ def sample_announce(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AnnouncePublicDelegatedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AnnouncePublicDelegatedPrefixeRequest): request = compute.AnnouncePublicDelegatedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -990,8 +1003,8 @@ def sample_announce(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, public_delegated_prefix]) if request is not None and has_flattened_params: raise ValueError( @@ -999,10 +1012,8 @@ def sample_announce(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AnnouncePublicDelegatedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AnnouncePublicDelegatedPrefixeRequest): request = compute.AnnouncePublicDelegatedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1148,8 +1159,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, public_delegated_prefix]) if request is not None and has_flattened_params: raise ValueError( @@ -1157,10 +1168,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeletePublicDelegatedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeletePublicDelegatedPrefixeRequest): request = compute.DeletePublicDelegatedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1281,8 +1290,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, public_delegated_prefix]) if request is not None and has_flattened_params: raise ValueError( @@ -1290,10 +1299,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeletePublicDelegatedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeletePublicDelegatedPrefixeRequest): request = compute.DeletePublicDelegatedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1444,8 +1451,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, public_delegated_prefix]) if request is not None and has_flattened_params: raise ValueError( @@ -1453,10 +1460,8 @@ def sample_get(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetPublicDelegatedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetPublicDelegatedPrefixeRequest): request = compute.GetPublicDelegatedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1577,8 +1582,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, public_delegated_prefix_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1586,10 +1591,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertPublicDelegatedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertPublicDelegatedPrefixeRequest): request = compute.InsertPublicDelegatedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1711,8 +1714,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, public_delegated_prefix_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1720,10 +1723,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertPublicDelegatedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertPublicDelegatedPrefixeRequest): request = compute.InsertPublicDelegatedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1863,8 +1864,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1872,10 +1873,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListPublicDelegatedPrefixesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ListPublicDelegatedPrefixesRequest): request = compute.ListPublicDelegatedPrefixesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2012,8 +2011,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, public_delegated_prefix, public_delegated_prefix_resource] ) @@ -2023,10 +2022,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchPublicDelegatedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchPublicDelegatedPrefixeRequest): request = compute.PatchPublicDelegatedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2161,8 +2158,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, public_delegated_prefix, public_delegated_prefix_resource] ) @@ -2172,10 +2169,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchPublicDelegatedPrefixeRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchPublicDelegatedPrefixeRequest): request = compute.PatchPublicDelegatedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2328,8 +2323,8 @@ def sample_withdraw(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, public_delegated_prefix]) if request is not None and has_flattened_params: raise ValueError( @@ -2337,10 +2332,8 @@ def sample_withdraw(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.WithdrawPublicDelegatedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.WithdrawPublicDelegatedPrefixeRequest): request = compute.WithdrawPublicDelegatedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2464,8 +2457,8 @@ def sample_withdraw(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, public_delegated_prefix]) if request is not None and has_flattened_params: raise ValueError( @@ -2473,10 +2466,8 @@ def sample_withdraw(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.WithdrawPublicDelegatedPrefixeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.WithdrawPublicDelegatedPrefixeRequest): request = compute.WithdrawPublicDelegatedPrefixeRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/client.py index 7c6eb725c0b4..8e8d64685376 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_autoscalers/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionAutoscalersTransport]] = None, + transport: Optional[ + Union[ + str, + RegionAutoscalersTransport, + Callable[..., RegionAutoscalersTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +525,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. 
- transport (Union[str, RegionAutoscalersTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,RegionAutoscalersTransport,Callable[..., RegionAutoscalersTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionAutoscalersTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -632,8 +641,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionAutoscalersTransport], + Callable[..., RegionAutoscalersTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionAutoscalersTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -721,8 +738,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, autoscaler]) if request is not None and has_flattened_params: raise ValueError( @@ -730,10 +747,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionAutoscalerRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionAutoscalerRequest): request = compute.DeleteRegionAutoscalerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -851,8 +866,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, autoscaler]) if request is not None and has_flattened_params: raise ValueError( @@ -860,10 +875,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionAutoscalerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionAutoscalerRequest): request = compute.DeleteRegionAutoscalerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1015,8 +1028,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, autoscaler]) if request is not None and has_flattened_params: raise ValueError( @@ -1024,10 +1037,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionAutoscalerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRegionAutoscalerRequest): request = compute.GetRegionAutoscalerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1145,8 +1156,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, autoscaler_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1154,10 +1165,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionAutoscalerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionAutoscalerRequest): request = compute.InsertRegionAutoscalerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1274,8 +1283,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, autoscaler_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1283,10 +1292,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionAutoscalerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionAutoscalerRequest): request = compute.InsertRegionAutoscalerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1426,8 +1433,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1435,10 +1442,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionAutoscalersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ListRegionAutoscalersRequest): request = compute.ListRegionAutoscalersRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1564,8 +1569,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, autoscaler_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1573,10 +1578,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRegionAutoscalerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRegionAutoscalerRequest): request = compute.PatchRegionAutoscalerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1695,8 +1698,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, autoscaler_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1704,10 +1707,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRegionAutoscalerRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRegionAutoscalerRequest): request = compute.PatchRegionAutoscalerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1849,8 +1850,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, autoscaler_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1858,10 +1859,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateRegionAutoscalerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateRegionAutoscalerRequest): request = compute.UpdateRegionAutoscalerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1978,8 +1977,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, autoscaler_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1987,10 +1986,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateRegionAutoscalerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateRegionAutoscalerRequest): request = compute.UpdateRegionAutoscalerRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py index 92912a3d48f3..64d2101eb1bd 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_services/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -508,7 +509,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionBackendServicesTransport]] = None, + transport: Optional[ + Union[ + str, + RegionBackendServicesTransport, + Callable[..., RegionBackendServicesTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -520,9 +527,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. 
- transport (Union[str, RegionBackendServicesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,RegionBackendServicesTransport,Callable[..., RegionBackendServicesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionBackendServicesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -634,8 +643,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionBackendServicesTransport], + Callable[..., RegionBackendServicesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionBackendServicesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -728,8 +745,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, backend_service]) if request is not None and has_flattened_params: raise ValueError( @@ -737,10 +754,8 @@ def sample_delete(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionBackendServiceRequest): request = compute.DeleteRegionBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -863,8 +878,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, backend_service]) if request is not None and has_flattened_params: raise ValueError( @@ -872,10 +887,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionBackendServiceRequest): request = compute.DeleteRegionBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1034,8 +1047,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, backend_service]) if request is not None and has_flattened_params: raise ValueError( @@ -1043,10 +1056,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRegionBackendServiceRequest): request = compute.GetRegionBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1175,8 +1186,8 @@ def sample_get_health(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, backend_service, resource_group_reference_resource] ) @@ -1186,10 +1197,8 @@ def sample_get_health(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetHealthRegionBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.GetHealthRegionBackendServiceRequest): request = compute.GetHealthRegionBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1336,8 +1345,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1345,10 +1354,8 @@ def sample_get_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetIamPolicyRegionBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetIamPolicyRegionBackendServiceRequest): request = compute.GetIamPolicyRegionBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1470,8 +1477,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, backend_service_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1479,10 +1486,8 @@ def sample_insert(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionBackendServiceRequest): request = compute.InsertRegionBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1603,8 +1608,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, backend_service_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1612,10 +1617,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionBackendServiceRequest): request = compute.InsertRegionBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1756,8 +1759,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1765,10 +1768,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionBackendServicesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListRegionBackendServicesRequest): request = compute.ListRegionBackendServicesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1893,8 +1894,8 @@ def sample_list_usable(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1902,10 +1903,8 @@ def sample_list_usable(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListUsableRegionBackendServicesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ListUsableRegionBackendServicesRequest): request = compute.ListUsableRegionBackendServicesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2041,8 +2040,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, backend_service, backend_service_resource] ) @@ -2052,10 +2051,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRegionBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRegionBackendServiceRequest): request = compute.PatchRegionBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2187,8 +2184,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, backend_service, backend_service_resource] ) @@ -2198,10 +2195,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRegionBackendServiceRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRegionBackendServiceRequest): request = compute.PatchRegionBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2379,8 +2374,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, region_set_policy_request_resource] ) @@ -2390,10 +2385,8 @@ def sample_set_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetIamPolicyRegionBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetIamPolicyRegionBackendServiceRequest): request = compute.SetIamPolicyRegionBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2530,8 +2523,8 @@ def sample_set_security_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, region, backend_service, security_policy_reference_resource] ) @@ -2541,10 +2534,8 @@ def sample_set_security_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetSecurityPolicyRegionBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.SetSecurityPolicyRegionBackendServiceRequest ): @@ -2683,8 +2674,8 @@ def sample_set_security_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, backend_service, security_policy_reference_resource] ) @@ -2694,10 +2685,8 @@ def sample_set_security_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetSecurityPolicyRegionBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.SetSecurityPolicyRegionBackendServiceRequest ): @@ -2857,8 +2846,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, test_permissions_request_resource] ) @@ -2868,10 +2857,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsRegionBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.TestIamPermissionsRegionBackendServiceRequest ): @@ -3007,8 +2994,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, backend_service, backend_service_resource] ) @@ -3018,10 +3005,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateRegionBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateRegionBackendServiceRequest): request = compute.UpdateRegionBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3153,8 +3138,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, backend_service, backend_service_resource] ) @@ -3164,10 +3149,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateRegionBackendServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateRegionBackendServiceRequest): request = compute.UpdateRegionBackendServiceRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/client.py index 93c17a34e13d..62d7614881e8 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_commitments/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionCommitmentsTransport]] = None, + transport: Optional[ + Union[ + str, + RegionCommitmentsTransport, + Callable[..., RegionCommitmentsTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 
+525,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, RegionCommitmentsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,RegionCommitmentsTransport,Callable[..., RegionCommitmentsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionCommitmentsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -632,8 +641,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionCommitmentsTransport], + Callable[..., RegionCommitmentsTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionCommitmentsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -711,8 +728,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -720,10 +737,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListRegionCommitmentsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListRegionCommitmentsRequest): request = compute.AggregatedListRegionCommitmentsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -845,8 +860,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, commitment]) if request is not None and has_flattened_params: raise ValueError( @@ -854,10 +869,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionCommitmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRegionCommitmentRequest): request = compute.GetRegionCommitmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -973,8 +986,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, commitment_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -982,10 +995,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionCommitmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionCommitmentRequest): request = compute.InsertRegionCommitmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1100,8 +1111,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, commitment_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1109,10 +1120,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionCommitmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.InsertRegionCommitmentRequest): request = compute.InsertRegionCommitmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1250,8 +1259,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1259,10 +1268,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionCommitmentsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListRegionCommitmentsRequest): request = compute.ListRegionCommitmentsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1395,8 +1402,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, commitment, commitment_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1404,10 +1411,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateRegionCommitmentRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateRegionCommitmentRequest): request = compute.UpdateRegionCommitmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1536,8 +1541,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, commitment, commitment_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1545,10 +1550,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateRegionCommitmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.UpdateRegionCommitmentRequest): request = compute.UpdateRegionCommitmentRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/client.py index 34bcf49d4919..67288a617489 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disk_types/client.py @@ -17,6 +17,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -503,7 +504,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionDiskTypesTransport]] = None, + transport: Optional[ + Union[ + str, RegionDiskTypesTransport, Callable[..., RegionDiskTypesTransport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -515,9 +520,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, RegionDiskTypesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,RegionDiskTypesTransport,Callable[..., RegionDiskTypesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionDiskTypesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). 
We welcome your feedback via an issue in this library's source repository. @@ -629,8 +636,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionDiskTypesTransport], Callable[..., RegionDiskTypesTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionDiskTypesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -727,8 +741,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, disk_type]) if request is not None and has_flattened_params: raise ValueError( @@ -736,10 +750,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionDiskTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRegionDiskTypeRequest): request = compute.GetRegionDiskTypeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -853,8 +865,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -862,10 +874,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionDiskTypesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListRegionDiskTypesRequest): request = compute.ListRegionDiskTypesRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py index c0bb882852b6..698537671807 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionDisksTransport]] = None, + transport: Optional[ + Union[str, RegionDisksTransport, Callable[..., RegionDisksTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. 
- transport (Union[str, RegionDisksTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,RegionDisksTransport,Callable[..., RegionDisksTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionDisksTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -627,8 +632,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionDisksTransport], Callable[..., RegionDisksTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionDisksTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -728,8 +740,8 @@ def sample_add_resource_policies(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, disk, region_disks_add_resource_policies_request_resource] ) @@ -739,10 +751,8 @@ def sample_add_resource_policies(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddResourcePoliciesRegionDiskRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddResourcePoliciesRegionDiskRequest): request = compute.AddResourcePoliciesRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -876,8 +886,8 @@ def sample_add_resource_policies(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, disk, region_disks_add_resource_policies_request_resource] ) @@ -887,10 +897,8 @@ def sample_add_resource_policies(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddResourcePoliciesRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddResourcePoliciesRegionDiskRequest): request = compute.AddResourcePoliciesRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1038,8 +1046,8 @@ def sample_bulk_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, region, bulk_insert_disk_resource_resource] ) @@ -1049,10 +1057,8 @@ def sample_bulk_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.BulkInsertRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.BulkInsertRegionDiskRequest): request = compute.BulkInsertRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1172,8 +1178,8 @@ def sample_bulk_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, bulk_insert_disk_resource_resource] ) @@ -1183,10 +1189,8 @@ def sample_bulk_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.BulkInsertRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.BulkInsertRegionDiskRequest): request = compute.BulkInsertRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1340,8 +1344,8 @@ def sample_create_snapshot(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, disk, snapshot_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1349,10 +1353,8 @@ def sample_create_snapshot(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.CreateSnapshotRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.CreateSnapshotRegionDiskRequest): request = compute.CreateSnapshotRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1482,8 +1484,8 @@ def sample_create_snapshot(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, disk, snapshot_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1491,10 +1493,8 @@ def sample_create_snapshot(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.CreateSnapshotRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.CreateSnapshotRegionDiskRequest): request = compute.CreateSnapshotRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1643,8 +1643,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, disk]) if request is not None and has_flattened_params: raise ValueError( @@ -1652,10 +1652,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionDiskRequest): request = compute.DeleteRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1777,8 +1775,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, disk]) if request is not None and has_flattened_params: raise ValueError( @@ -1786,10 +1784,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionDiskRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionDiskRequest): request = compute.DeleteRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1942,8 +1938,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, disk]) if request is not None and has_flattened_params: raise ValueError( @@ -1951,10 +1947,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRegionDiskRequest): request = compute.GetRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2095,8 +2089,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2104,10 +2098,8 @@ def sample_get_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetIamPolicyRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetIamPolicyRegionDiskRequest): request = compute.GetIamPolicyRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2223,8 +2215,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, disk_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2232,10 +2224,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionDiskRequest): request = compute.InsertRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2350,8 +2340,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, disk_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2359,10 +2349,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionDiskRequest): request = compute.InsertRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2500,8 +2488,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -2509,10 +2497,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionDisksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ListRegionDisksRequest): request = compute.ListRegionDisksRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2646,8 +2632,8 @@ def sample_remove_resource_policies(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2662,10 +2648,8 @@ def sample_remove_resource_policies(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveResourcePoliciesRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.RemoveResourcePoliciesRegionDiskRequest): request = compute.RemoveResourcePoliciesRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2797,8 +2781,8 @@ def sample_remove_resource_policies(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2813,10 +2797,8 @@ def sample_remove_resource_policies(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveResourcePoliciesRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.RemoveResourcePoliciesRegionDiskRequest): request = compute.RemoveResourcePoliciesRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2969,8 +2951,8 @@ def sample_resize(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, disk, region_disks_resize_request_resource] ) @@ -2980,10 +2962,8 @@ def sample_resize(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ResizeRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ResizeRegionDiskRequest): request = compute.ResizeRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3111,8 +3091,8 @@ def sample_resize(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, disk, region_disks_resize_request_resource] ) @@ -3122,10 +3102,8 @@ def sample_resize(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ResizeRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ResizeRegionDiskRequest): request = compute.ResizeRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3303,8 +3281,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, region_set_policy_request_resource] ) @@ -3314,10 +3292,8 @@ def sample_set_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetIamPolicyRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetIamPolicyRegionDiskRequest): request = compute.SetIamPolicyRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3447,8 +3423,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, region, resource, region_set_labels_request_resource] ) @@ -3458,10 +3434,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsRegionDiskRequest): request = compute.SetLabelsRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3591,8 +3565,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, region_set_labels_request_resource] ) @@ -3602,10 +3576,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsRegionDiskRequest): request = compute.SetLabelsRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3763,8 +3735,8 @@ def sample_start_async_replication(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -3779,10 +3751,8 @@ def sample_start_async_replication(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.StartAsyncReplicationRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.StartAsyncReplicationRegionDiskRequest): request = compute.StartAsyncReplicationRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3915,8 +3885,8 @@ def sample_start_async_replication(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -3931,10 +3901,8 @@ def sample_start_async_replication(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.StartAsyncReplicationRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.StartAsyncReplicationRegionDiskRequest): request = compute.StartAsyncReplicationRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4084,8 +4052,8 @@ def sample_stop_async_replication(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, disk]) if request is not None and has_flattened_params: raise ValueError( @@ -4093,10 +4061,8 @@ def sample_stop_async_replication(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.StopAsyncReplicationRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.StopAsyncReplicationRegionDiskRequest): request = compute.StopAsyncReplicationRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4217,8 +4183,8 @@ def sample_stop_async_replication(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, disk]) if request is not None and has_flattened_params: raise ValueError( @@ -4226,10 +4192,8 @@ def sample_stop_async_replication(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.StopAsyncReplicationRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.StopAsyncReplicationRegionDiskRequest): request = compute.StopAsyncReplicationRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4379,8 +4343,8 @@ def sample_stop_group_async_replication(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, disks_stop_group_async_replication_resource_resource] ) @@ -4390,10 +4354,8 @@ def sample_stop_group_async_replication(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.StopGroupAsyncReplicationRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.StopGroupAsyncReplicationRegionDiskRequest): request = compute.StopGroupAsyncReplicationRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4521,8 +4483,8 @@ def sample_stop_group_async_replication(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, disks_stop_group_async_replication_resource_resource] ) @@ -4532,10 +4494,8 @@ def sample_stop_group_async_replication(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.StopGroupAsyncReplicationRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.StopGroupAsyncReplicationRegionDiskRequest): request = compute.StopGroupAsyncReplicationRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4692,8 +4652,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, test_permissions_request_resource] ) @@ -4703,10 +4663,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.TestIamPermissionsRegionDiskRequest): request = compute.TestIamPermissionsRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4837,8 +4795,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, disk, disk_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -4846,10 +4804,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateRegionDiskRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateRegionDiskRequest): request = compute.UpdateRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4978,8 +4934,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, disk, disk_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -4987,10 +4943,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateRegionDiskRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateRegionDiskRequest): request = compute.UpdateRegionDiskRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/client.py index 2a82cc8b2623..4406c254f136 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_check_services/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -508,7 +509,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionHealthCheckServicesTransport]] = None, + transport: Optional[ + Union[ + str, + RegionHealthCheckServicesTransport, + Callable[..., RegionHealthCheckServicesTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -520,9 +527,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, RegionHealthCheckServicesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. 
+ transport (Optional[Union[str,RegionHealthCheckServicesTransport,Callable[..., RegionHealthCheckServicesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionHealthCheckServicesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -636,8 +645,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionHealthCheckServicesTransport], + Callable[..., RegionHealthCheckServicesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionHealthCheckServicesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -730,8 +747,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, health_check_service]) if request is not None and has_flattened_params: raise ValueError( @@ -739,10 +756,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionHealthCheckServiceRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionHealthCheckServiceRequest): request = compute.DeleteRegionHealthCheckServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -865,8 +880,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, health_check_service]) if request is not None and has_flattened_params: raise ValueError( @@ -874,10 +889,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionHealthCheckServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionHealthCheckServiceRequest): request = compute.DeleteRegionHealthCheckServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1026,8 +1039,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, health_check_service]) if request is not None and has_flattened_params: raise ValueError( @@ -1035,10 +1048,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionHealthCheckServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRegionHealthCheckServiceRequest): request = compute.GetRegionHealthCheckServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1159,8 +1170,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, health_check_service_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1168,10 +1179,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionHealthCheckServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.InsertRegionHealthCheckServiceRequest): request = compute.InsertRegionHealthCheckServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1291,8 +1300,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, health_check_service_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1300,10 +1309,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionHealthCheckServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionHealthCheckServiceRequest): request = compute.InsertRegionHealthCheckServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1444,8 +1451,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1453,10 +1460,8 @@ def sample_list(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionHealthCheckServicesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListRegionHealthCheckServicesRequest): request = compute.ListRegionHealthCheckServicesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1594,8 +1599,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, health_check_service, health_check_service_resource] ) @@ -1605,10 +1610,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRegionHealthCheckServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRegionHealthCheckServiceRequest): request = compute.PatchRegionHealthCheckServiceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1742,8 +1745,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, region, health_check_service, health_check_service_resource] ) @@ -1753,10 +1756,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRegionHealthCheckServiceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRegionHealthCheckServiceRequest): request = compute.PatchRegionHealthCheckServiceRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py index c9020c487e80..5edb8303c529 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_checks/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionHealthChecksTransport]] = None, + transport: Optional[ + Union[ + str, + RegionHealthChecksTransport, + Callable[..., RegionHealthChecksTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +525,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. 
- transport (Union[str, RegionHealthChecksTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,RegionHealthChecksTransport,Callable[..., RegionHealthChecksTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionHealthChecksTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -632,8 +641,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionHealthChecksTransport], + Callable[..., RegionHealthChecksTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionHealthChecksTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -723,8 +740,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, health_check]) if request is not None and has_flattened_params: raise ValueError( @@ -732,10 +749,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionHealthCheckRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionHealthCheckRequest): request = compute.DeleteRegionHealthCheckRequest(request) # If we have keyword arguments corresponding to fields on the @@ -855,8 +870,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, health_check]) if request is not None and has_flattened_params: raise ValueError( @@ -864,10 +879,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionHealthCheckRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionHealthCheckRequest): request = compute.DeleteRegionHealthCheckRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1027,8 +1040,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, health_check]) if request is not None and has_flattened_params: raise ValueError( @@ -1036,10 +1049,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionHealthCheckRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRegionHealthCheckRequest): request = compute.GetRegionHealthCheckRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1157,8 +1168,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, health_check_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1166,10 +1177,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionHealthCheckRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionHealthCheckRequest): request = compute.InsertRegionHealthCheckRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1286,8 +1295,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, health_check_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1295,10 +1304,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionHealthCheckRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionHealthCheckRequest): request = compute.InsertRegionHealthCheckRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1438,8 +1445,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1447,10 +1454,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionHealthChecksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ListRegionHealthChecksRequest): request = compute.ListRegionHealthChecksRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1585,8 +1590,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, health_check, health_check_resource] ) @@ -1596,10 +1601,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRegionHealthCheckRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRegionHealthCheckRequest): request = compute.PatchRegionHealthCheckRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1730,8 +1733,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, health_check, health_check_resource] ) @@ -1741,10 +1744,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRegionHealthCheckRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRegionHealthCheckRequest): request = compute.PatchRegionHealthCheckRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1898,8 +1899,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, health_check, health_check_resource] ) @@ -1909,10 +1910,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateRegionHealthCheckRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateRegionHealthCheckRequest): request = compute.UpdateRegionHealthCheckRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2041,8 +2040,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, health_check, health_check_resource] ) @@ -2052,10 +2051,8 @@ def sample_update(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateRegionHealthCheckRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateRegionHealthCheckRequest): request = compute.UpdateRegionHealthCheckRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/client.py index 8f3ae4e92be6..bacde397ba12 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_managers/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -510,7 +511,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionInstanceGroupManagersTransport]] = None, + transport: Optional[ + Union[ + str, + RegionInstanceGroupManagersTransport, + Callable[..., RegionInstanceGroupManagersTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -522,9 +529,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, RegionInstanceGroupManagersTransport]): The - transport to use. If set to None, a transport is chosen - automatically. 
+ transport (Optional[Union[str,RegionInstanceGroupManagersTransport,Callable[..., RegionInstanceGroupManagersTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionInstanceGroupManagersTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -638,8 +647,18 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionInstanceGroupManagersTransport], + Callable[..., RegionInstanceGroupManagersTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast( + Callable[..., RegionInstanceGroupManagersTransport], transport + ) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -752,8 +771,8 @@ def sample_abandon_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -768,10 +787,8 @@ def sample_abandon_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AbandonInstancesRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.AbandonInstancesRegionInstanceGroupManagerRequest ): @@ -923,8 +940,8 @@ def sample_abandon_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -939,10 +956,8 @@ def sample_abandon_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AbandonInstancesRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.AbandonInstancesRegionInstanceGroupManagerRequest ): @@ -1109,8 +1124,8 @@ def sample_apply_updates_to_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1125,10 +1140,8 @@ def sample_apply_updates_to_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest ): @@ -1274,8 +1287,8 @@ def sample_apply_updates_to_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1290,10 +1303,8 @@ def sample_apply_updates_to_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest ): @@ -1469,8 +1480,8 @@ def sample_create_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1485,10 +1496,8 @@ def sample_create_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.CreateInstancesRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.CreateInstancesRegionInstanceGroupManagerRequest ): @@ -1635,8 +1644,8 @@ def sample_create_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1651,10 +1660,8 @@ def sample_create_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.CreateInstancesRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.CreateInstancesRegionInstanceGroupManagerRequest ): @@ -1811,8 +1818,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, instance_group_manager]) if request is not None and has_flattened_params: raise ValueError( @@ -1820,10 +1827,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionInstanceGroupManagerRequest): request = compute.DeleteRegionInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1946,8 +1951,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, instance_group_manager]) if request is not None and has_flattened_params: raise ValueError( @@ -1955,10 +1960,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionInstanceGroupManagerRequest): request = compute.DeleteRegionInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2125,8 +2128,8 @@ def sample_delete_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2141,10 +2144,8 @@ def sample_delete_instances(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteInstancesRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.DeleteInstancesRegionInstanceGroupManagerRequest ): @@ -2295,8 +2296,8 @@ def sample_delete_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2311,10 +2312,8 @@ def sample_delete_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteInstancesRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.DeleteInstancesRegionInstanceGroupManagerRequest ): @@ -2481,8 +2480,8 @@ def sample_delete_per_instance_configs(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2497,10 +2496,8 @@ def sample_delete_per_instance_configs(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest ): @@ -2646,8 +2643,8 @@ def sample_delete_per_instance_configs(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2662,10 +2659,8 @@ def sample_delete_per_instance_configs(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest ): @@ -2833,8 +2828,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, instance_group_manager]) if request is not None and has_flattened_params: raise ValueError( @@ -2842,10 +2837,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRegionInstanceGroupManagerRequest): request = compute.GetRegionInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2973,8 +2966,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, instance_group_manager_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2982,10 +2975,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.InsertRegionInstanceGroupManagerRequest): request = compute.InsertRegionInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3114,8 +3105,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, instance_group_manager_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -3123,10 +3114,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionInstanceGroupManagerRequest): request = compute.InsertRegionInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3270,8 +3259,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -3279,10 +3268,8 @@ def sample_list(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionInstanceGroupManagersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListRegionInstanceGroupManagersRequest): request = compute.ListRegionInstanceGroupManagersRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3416,8 +3403,8 @@ def sample_list_errors(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, instance_group_manager]) if request is not None and has_flattened_params: raise ValueError( @@ -3425,10 +3412,8 @@ def sample_list_errors(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListErrorsRegionInstanceGroupManagersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.ListErrorsRegionInstanceGroupManagersRequest ): @@ -3568,8 +3553,8 @@ def sample_list_managed_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, instance_group_manager]) if request is not None and has_flattened_params: raise ValueError( @@ -3577,10 +3562,8 @@ def sample_list_managed_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListManagedInstancesRegionInstanceGroupManagersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.ListManagedInstancesRegionInstanceGroupManagersRequest ): @@ -3721,8 +3704,8 @@ def sample_list_per_instance_configs(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, instance_group_manager]) if request is not None and has_flattened_params: raise ValueError( @@ -3730,10 +3713,8 @@ def sample_list_per_instance_configs(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest ): @@ -3889,8 +3870,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, instance_group_manager, instance_group_manager_resource] ) @@ -3900,10 +3881,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRegionInstanceGroupManagerRequest): request = compute.PatchRegionInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4048,8 +4027,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, instance_group_manager, instance_group_manager_resource] ) @@ -4059,10 +4038,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.PatchRegionInstanceGroupManagerRequest): request = compute.PatchRegionInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4226,8 +4203,8 @@ def sample_patch_per_instance_configs(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -4242,10 +4219,8 @@ def sample_patch_per_instance_configs(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest ): @@ -4393,8 +4368,8 @@ def sample_patch_per_instance_configs(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -4409,10 +4384,8 @@ def sample_patch_per_instance_configs(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest ): @@ -4591,8 +4564,8 @@ def sample_recreate_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -4607,10 +4580,8 @@ def sample_recreate_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RecreateInstancesRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.RecreateInstancesRegionInstanceGroupManagerRequest ): @@ -4759,8 +4730,8 @@ def sample_recreate_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -4775,10 +4746,8 @@ def sample_recreate_instances(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RecreateInstancesRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.RecreateInstancesRegionInstanceGroupManagerRequest ): @@ -4952,8 +4921,8 @@ def sample_resize(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, instance_group_manager, size]) if request is not None and has_flattened_params: raise ValueError( @@ -4961,10 +4930,8 @@ def sample_resize(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ResizeRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ResizeRegionInstanceGroupManagerRequest): request = compute.ResizeRegionInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -5107,8 +5074,8 @@ def sample_resize(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, instance_group_manager, size]) if request is not None and has_flattened_params: raise ValueError( @@ -5116,10 +5083,8 @@ def sample_resize(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ResizeRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ResizeRegionInstanceGroupManagerRequest): request = compute.ResizeRegionInstanceGroupManagerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -5278,8 +5243,8 @@ def sample_set_instance_template(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -5294,10 +5259,8 @@ def sample_set_instance_template(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetInstanceTemplateRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.SetInstanceTemplateRegionInstanceGroupManagerRequest ): @@ -5437,8 +5400,8 @@ def sample_set_instance_template(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [ project, @@ -5453,10 +5416,8 @@ def sample_set_instance_template(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetInstanceTemplateRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.SetInstanceTemplateRegionInstanceGroupManagerRequest ): @@ -5619,8 +5580,8 @@ def sample_set_target_pools(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -5635,10 +5596,8 @@ def sample_set_target_pools(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetTargetPoolsRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.SetTargetPoolsRegionInstanceGroupManagerRequest ): @@ -5777,8 +5736,8 @@ def sample_set_target_pools(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [ project, @@ -5793,10 +5752,8 @@ def sample_set_target_pools(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetTargetPoolsRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.SetTargetPoolsRegionInstanceGroupManagerRequest ): @@ -5965,8 +5922,8 @@ def sample_update_per_instance_configs(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -5981,10 +5938,8 @@ def sample_update_per_instance_configs(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest ): @@ -6132,8 +6087,8 @@ def sample_update_per_instance_configs(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [ project, @@ -6148,10 +6103,8 @@ def sample_update_per_instance_configs(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest ): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/client.py index 0253d944e2ff..d73c7751e67f 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_groups/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionInstanceGroupsTransport]] = None, + transport: Optional[ + Union[ + str, + RegionInstanceGroupsTransport, + Callable[..., RegionInstanceGroupsTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +525,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, RegionInstanceGroupsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. 
+ transport (Optional[Union[str,RegionInstanceGroupsTransport,Callable[..., RegionInstanceGroupsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionInstanceGroupsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -632,8 +641,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionInstanceGroupsTransport], + Callable[..., RegionInstanceGroupsTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionInstanceGroupsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -735,8 +752,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, instance_group]) if request is not None and has_flattened_params: raise ValueError( @@ -744,10 +761,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionInstanceGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRegionInstanceGroupRequest): request = compute.GetRegionInstanceGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -863,8 +878,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -872,10 +887,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionInstanceGroupsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListRegionInstanceGroupsRequest): request = compute.ListRegionInstanceGroupsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1017,8 +1030,8 @@ def sample_list_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1033,10 +1046,8 @@ def sample_list_instances(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListInstancesRegionInstanceGroupsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListInstancesRegionInstanceGroupsRequest): request = compute.ListInstancesRegionInstanceGroupsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1180,8 +1191,8 @@ def sample_set_named_ports(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1196,10 +1207,8 @@ def sample_set_named_ports(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetNamedPortsRegionInstanceGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetNamedPortsRegionInstanceGroupRequest): request = compute.SetNamedPortsRegionInstanceGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1334,8 +1343,8 @@ def sample_set_named_ports(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [ project, @@ -1350,10 +1359,8 @@ def sample_set_named_ports(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetNamedPortsRegionInstanceGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetNamedPortsRegionInstanceGroupRequest): request = compute.SetNamedPortsRegionInstanceGroupRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py index 7d4ad421b2cc..20a98387beb6 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_templates/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -508,7 +509,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionInstanceTemplatesTransport]] = None, + transport: Optional[ + Union[ + str, + RegionInstanceTemplatesTransport, + Callable[..., RegionInstanceTemplatesTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -520,9 +527,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. 
- transport (Union[str, RegionInstanceTemplatesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,RegionInstanceTemplatesTransport,Callable[..., RegionInstanceTemplatesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionInstanceTemplatesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -636,8 +645,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionInstanceTemplatesTransport], + Callable[..., RegionInstanceTemplatesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionInstanceTemplatesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -730,8 +747,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, instance_template]) if request is not None and has_flattened_params: raise ValueError( @@ -739,10 +756,8 @@ def sample_delete(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionInstanceTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionInstanceTemplateRequest): request = compute.DeleteRegionInstanceTemplateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -865,8 +880,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, instance_template]) if request is not None and has_flattened_params: raise ValueError( @@ -874,10 +889,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionInstanceTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionInstanceTemplateRequest): request = compute.DeleteRegionInstanceTemplateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1031,8 +1044,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, instance_template]) if request is not None and has_flattened_params: raise ValueError( @@ -1040,10 +1053,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionInstanceTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRegionInstanceTemplateRequest): request = compute.GetRegionInstanceTemplateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1164,8 +1175,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, instance_template_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1173,10 +1184,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionInstanceTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.InsertRegionInstanceTemplateRequest): request = compute.InsertRegionInstanceTemplateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1296,8 +1305,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, instance_template_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1305,10 +1314,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionInstanceTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionInstanceTemplateRequest): request = compute.InsertRegionInstanceTemplateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1450,8 +1457,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1459,10 +1466,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionInstanceTemplatesRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListRegionInstanceTemplatesRequest): request = compute.ListRegionInstanceTemplatesRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/client.py index 58a40cf1bab4..eccd921bf254 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instances/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -505,7 +506,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionInstancesTransport]] = None, + transport: Optional[ + Union[ + str, RegionInstancesTransport, Callable[..., RegionInstancesTransport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -517,9 +522,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, RegionInstancesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,RegionInstancesTransport,Callable[..., RegionInstancesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. 
+ If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionInstancesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -631,8 +638,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionInstancesTransport], Callable[..., RegionInstancesTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionInstancesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -722,8 +736,8 @@ def sample_bulk_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, bulk_insert_instance_resource_resource] ) @@ -733,10 +747,8 @@ def sample_bulk_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.BulkInsertRegionInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.BulkInsertRegionInstanceRequest): request = compute.BulkInsertRegionInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -857,8 +869,8 @@ def sample_bulk_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, bulk_insert_instance_resource_resource] ) @@ -868,10 +880,8 @@ def sample_bulk_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.BulkInsertRegionInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.BulkInsertRegionInstanceRequest): request = compute.BulkInsertRegionInstanceRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/client.py index 3190287cdb61..ef6ed0ac1d03 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshots/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -508,7 +509,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionInstantSnapshotsTransport]] = None, + transport: Optional[ + Union[ + str, + RegionInstantSnapshotsTransport, + Callable[..., RegionInstantSnapshotsTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -520,9 +527,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, RegionInstantSnapshotsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,RegionInstantSnapshotsTransport,Callable[..., RegionInstantSnapshotsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionInstantSnapshotsTransport constructor. + If set to None, a transport is chosen automatically. 
NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -634,8 +643,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionInstantSnapshotsTransport], + Callable[..., RegionInstantSnapshotsTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionInstantSnapshotsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -734,8 +751,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, instant_snapshot]) if request is not None and has_flattened_params: raise ValueError( @@ -743,10 +760,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionInstantSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteRegionInstantSnapshotRequest): request = compute.DeleteRegionInstantSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -875,8 +890,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, instant_snapshot]) if request is not None and has_flattened_params: raise ValueError( @@ -884,10 +899,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionInstantSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionInstantSnapshotRequest): request = compute.DeleteRegionInstantSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1034,8 +1047,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, instant_snapshot]) if request is not None and has_flattened_params: raise ValueError( @@ -1043,10 +1056,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionInstantSnapshotRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRegionInstantSnapshotRequest): request = compute.GetRegionInstantSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1189,8 +1200,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1198,10 +1209,8 @@ def sample_get_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetIamPolicyRegionInstantSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetIamPolicyRegionInstantSnapshotRequest): request = compute.GetIamPolicyRegionInstantSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1318,8 +1327,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, instant_snapshot_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1327,10 +1336,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionInstantSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionInstantSnapshotRequest): request = compute.InsertRegionInstantSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1446,8 +1453,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, instant_snapshot_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1455,10 +1462,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionInstantSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.InsertRegionInstantSnapshotRequest): request = compute.InsertRegionInstantSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1600,8 +1605,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1609,10 +1614,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionInstantSnapshotsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListRegionInstantSnapshotsRequest): request = compute.ListRegionInstantSnapshotsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1769,8 +1772,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, region_set_policy_request_resource] ) @@ -1780,10 +1783,8 @@ def sample_set_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetIamPolicyRegionInstantSnapshotRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetIamPolicyRegionInstantSnapshotRequest): request = compute.SetIamPolicyRegionInstantSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1917,8 +1918,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, region_set_labels_request_resource] ) @@ -1928,10 +1929,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsRegionInstantSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsRegionInstantSnapshotRequest): request = compute.SetLabelsRegionInstantSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2065,8 +2064,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, region, resource, region_set_labels_request_resource] ) @@ -2076,10 +2075,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsRegionInstantSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsRegionInstantSnapshotRequest): request = compute.SetLabelsRegionInstantSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2237,8 +2234,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, test_permissions_request_resource] ) @@ -2248,10 +2245,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsRegionInstantSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance( request, compute.TestIamPermissionsRegionInstantSnapshotRequest ): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py index e6b7a8cd2c14..7f6541a44f52 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -510,7 +511,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionNetworkEndpointGroupsTransport]] = None, + transport: Optional[ + Union[ + str, + RegionNetworkEndpointGroupsTransport, + Callable[..., RegionNetworkEndpointGroupsTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -522,9 +529,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, RegionNetworkEndpointGroupsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,RegionNetworkEndpointGroupsTransport,Callable[..., RegionNetworkEndpointGroupsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionNetworkEndpointGroupsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). 
We welcome your feedback via an issue in this library's source repository. @@ -638,8 +647,18 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionNetworkEndpointGroupsTransport], + Callable[..., RegionNetworkEndpointGroupsTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast( + Callable[..., RegionNetworkEndpointGroupsTransport], transport + ) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -743,8 +762,8 @@ def sample_attach_network_endpoints(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -759,10 +778,8 @@ def sample_attach_network_endpoints(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest ): @@ -907,8 +924,8 @@ def sample_attach_network_endpoints(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -923,10 +940,8 @@ def sample_attach_network_endpoints(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest ): @@ -1088,8 +1103,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, network_endpoint_group]) if request is not None and has_flattened_params: raise ValueError( @@ -1097,10 +1112,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionNetworkEndpointGroupRequest): request = compute.DeleteRegionNetworkEndpointGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1226,8 +1239,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, network_endpoint_group]) if request is not None and has_flattened_params: raise ValueError( @@ -1235,10 +1248,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionNetworkEndpointGroupRequest): request = compute.DeleteRegionNetworkEndpointGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1397,8 +1408,8 @@ def sample_detach_network_endpoints(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1413,10 +1424,8 @@ def sample_detach_network_endpoints(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance( request, compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest ): @@ -1561,8 +1570,8 @@ def sample_detach_network_endpoints(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1577,10 +1586,8 @@ def sample_detach_network_endpoints(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest ): @@ -1745,8 +1752,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, network_endpoint_group]) if request is not None and has_flattened_params: raise ValueError( @@ -1754,10 +1761,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRegionNetworkEndpointGroupRequest): request = compute.GetRegionNetworkEndpointGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1879,8 +1884,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, network_endpoint_group_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1888,10 +1893,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionNetworkEndpointGroupRequest): request = compute.InsertRegionNetworkEndpointGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2014,8 +2017,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, network_endpoint_group_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2023,10 +2026,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionNetworkEndpointGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionNetworkEndpointGroupRequest): request = compute.InsertRegionNetworkEndpointGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2170,8 +2171,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -2179,10 +2180,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionNetworkEndpointGroupsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ListRegionNetworkEndpointGroupsRequest): request = compute.ListRegionNetworkEndpointGroupsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2316,8 +2315,8 @@ def sample_list_network_endpoints(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, network_endpoint_group]) if request is not None and has_flattened_params: raise ValueError( @@ -2325,10 +2324,8 @@ def sample_list_network_endpoints(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance( request, compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest ): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py index b3fa7226b69f..5719671874ee 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_network_firewall_policies/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -510,7 +511,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionNetworkFirewallPoliciesTransport]] = None, + transport: Optional[ + Union[ + str, + RegionNetworkFirewallPoliciesTransport, + Callable[..., RegionNetworkFirewallPoliciesTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -522,9 +529,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, RegionNetworkFirewallPoliciesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,RegionNetworkFirewallPoliciesTransport,Callable[..., RegionNetworkFirewallPoliciesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionNetworkFirewallPoliciesTransport constructor. + If set to None, a transport is chosen automatically. 
NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -642,8 +651,18 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionNetworkFirewallPoliciesTransport], + Callable[..., RegionNetworkFirewallPoliciesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast( + Callable[..., RegionNetworkFirewallPoliciesTransport], transport + ) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -744,8 +763,8 @@ def sample_add_association(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, firewall_policy, firewall_policy_association_resource] ) @@ -755,10 +774,8 @@ def sample_add_association(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddAssociationRegionNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.AddAssociationRegionNetworkFirewallPolicyRequest ): @@ -895,8 +912,8 @@ def sample_add_association(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, firewall_policy, firewall_policy_association_resource] ) @@ -906,10 +923,8 @@ def sample_add_association(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddAssociationRegionNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.AddAssociationRegionNetworkFirewallPolicyRequest ): @@ -1068,8 +1083,8 @@ def sample_add_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, firewall_policy, firewall_policy_rule_resource] ) @@ -1079,10 +1094,8 @@ def sample_add_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddRuleRegionNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.AddRuleRegionNetworkFirewallPolicyRequest): request = compute.AddRuleRegionNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1212,8 +1225,8 @@ def sample_add_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, firewall_policy, firewall_policy_rule_resource] ) @@ -1223,10 +1236,8 @@ def sample_add_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddRuleRegionNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddRuleRegionNetworkFirewallPolicyRequest): request = compute.AddRuleRegionNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1376,8 +1387,8 @@ def sample_clone_rules(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1385,10 +1396,8 @@ def sample_clone_rules(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.CloneRulesRegionNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.CloneRulesRegionNetworkFirewallPolicyRequest ): @@ -1513,8 +1522,8 @@ def sample_clone_rules(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1522,10 +1531,8 @@ def sample_clone_rules(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.CloneRulesRegionNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.CloneRulesRegionNetworkFirewallPolicyRequest ): @@ -1674,8 +1681,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1683,10 +1690,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionNetworkFirewallPolicyRequest): request = compute.DeleteRegionNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1808,8 +1813,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1817,10 +1822,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteRegionNetworkFirewallPolicyRequest): request = compute.DeleteRegionNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1965,8 +1968,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1974,10 +1977,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRegionNetworkFirewallPolicyRequest): request = compute.GetRegionNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2097,8 +2098,8 @@ def sample_get_association(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -2106,10 +2107,8 @@ def sample_get_association(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetAssociationRegionNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.GetAssociationRegionNetworkFirewallPolicyRequest ): @@ -2229,8 +2228,8 @@ def sample_get_effective_firewalls(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, network]) if request is not None and has_flattened_params: raise ValueError( @@ -2238,10 +2237,8 @@ def sample_get_effective_firewalls(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest ): @@ -2387,8 +2384,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2396,10 +2393,8 @@ def sample_get_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetIamPolicyRegionNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.GetIamPolicyRegionNetworkFirewallPolicyRequest ): @@ -2525,8 +2520,8 @@ def sample_get_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -2534,10 +2529,8 @@ def sample_get_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRuleRegionNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRuleRegionNetworkFirewallPolicyRequest): request = compute.GetRuleRegionNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2657,8 +2650,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, firewall_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2666,10 +2659,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionNetworkFirewallPolicyRequest): request = compute.InsertRegionNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2788,8 +2779,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, firewall_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2797,10 +2788,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.InsertRegionNetworkFirewallPolicyRequest): request = compute.InsertRegionNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2941,8 +2930,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -2950,10 +2939,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionNetworkFirewallPoliciesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListRegionNetworkFirewallPoliciesRequest): request = compute.ListRegionNetworkFirewallPoliciesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3087,8 +3074,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, firewall_policy, firewall_policy_resource] ) @@ -3098,10 +3085,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRegionNetworkFirewallPolicyRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRegionNetworkFirewallPolicyRequest): request = compute.PatchRegionNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3231,8 +3216,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, firewall_policy, firewall_policy_resource] ) @@ -3242,10 +3227,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRegionNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRegionNetworkFirewallPolicyRequest): request = compute.PatchRegionNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3400,8 +3383,8 @@ def sample_patch_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, region, firewall_policy, firewall_policy_rule_resource] ) @@ -3411,10 +3394,8 @@ def sample_patch_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRuleRegionNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRuleRegionNetworkFirewallPolicyRequest): request = compute.PatchRuleRegionNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3544,8 +3525,8 @@ def sample_patch_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, firewall_policy, firewall_policy_rule_resource] ) @@ -3555,10 +3536,8 @@ def sample_patch_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRuleRegionNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRuleRegionNetworkFirewallPolicyRequest): request = compute.PatchRuleRegionNetworkFirewallPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3708,8 +3687,8 @@ def sample_remove_association(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -3717,10 +3696,8 @@ def sample_remove_association(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveAssociationRegionNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.RemoveAssociationRegionNetworkFirewallPolicyRequest ): @@ -3847,8 +3824,8 @@ def sample_remove_association(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -3856,10 +3833,8 @@ def sample_remove_association(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveAssociationRegionNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance( request, compute.RemoveAssociationRegionNetworkFirewallPolicyRequest ): @@ -4010,8 +3985,8 @@ def sample_remove_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -4019,10 +3994,8 @@ def sample_remove_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveRuleRegionNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.RemoveRuleRegionNetworkFirewallPolicyRequest ): @@ -4146,8 +4119,8 @@ def sample_remove_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, firewall_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -4155,10 +4128,8 @@ def sample_remove_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveRuleRegionNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.RemoveRuleRegionNetworkFirewallPolicyRequest ): @@ -4336,8 +4307,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, region_set_policy_request_resource] ) @@ -4347,10 +4318,8 @@ def sample_set_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetIamPolicyRegionNetworkFirewallPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.SetIamPolicyRegionNetworkFirewallPolicyRequest ): @@ -4485,8 +4454,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, test_permissions_request_resource] ) @@ -4496,10 +4465,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest ): diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/client.py index d075d9013206..453f7125bd0e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_notification_endpoints/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -510,7 +511,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionNotificationEndpointsTransport]] = None, + transport: Optional[ + Union[ + str, + RegionNotificationEndpointsTransport, + Callable[..., RegionNotificationEndpointsTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -522,9 +529,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, RegionNotificationEndpointsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,RegionNotificationEndpointsTransport,Callable[..., RegionNotificationEndpointsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. 
+ If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionNotificationEndpointsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -638,8 +647,18 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionNotificationEndpointsTransport], + Callable[..., RegionNotificationEndpointsTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast( + Callable[..., RegionNotificationEndpointsTransport], transport + ) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -732,8 +751,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, notification_endpoint]) if request is not None and has_flattened_params: raise ValueError( @@ -741,10 +760,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionNotificationEndpointRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteRegionNotificationEndpointRequest): request = compute.DeleteRegionNotificationEndpointRequest(request) # If we have keyword arguments corresponding to fields on the @@ -867,8 +884,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, notification_endpoint]) if request is not None and has_flattened_params: raise ValueError( @@ -876,10 +893,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionNotificationEndpointRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionNotificationEndpointRequest): request = compute.DeleteRegionNotificationEndpointRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1032,8 +1047,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, notification_endpoint]) if request is not None and has_flattened_params: raise ValueError( @@ -1041,10 +1056,8 @@ def sample_get(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionNotificationEndpointRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRegionNotificationEndpointRequest): request = compute.GetRegionNotificationEndpointRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1165,8 +1178,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, notification_endpoint_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1174,10 +1187,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionNotificationEndpointRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionNotificationEndpointRequest): request = compute.InsertRegionNotificationEndpointRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1297,8 +1308,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, notification_endpoint_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1306,10 +1317,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionNotificationEndpointRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionNotificationEndpointRequest): request = compute.InsertRegionNotificationEndpointRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1449,8 +1458,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1458,10 +1467,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionNotificationEndpointsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ListRegionNotificationEndpointsRequest): request = compute.ListRegionNotificationEndpointsRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py index ed31e69f8a1f..40c7b627974d 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_operations/client.py @@ -17,6 +17,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -503,7 +504,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionOperationsTransport]] = None, + transport: Optional[ + Union[ + str, RegionOperationsTransport, Callable[..., RegionOperationsTransport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -515,9 +520,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, RegionOperationsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,RegionOperationsTransport,Callable[..., RegionOperationsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionOperationsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). 
We welcome your feedback via an issue in this library's source repository. @@ -629,8 +636,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionOperationsTransport], + Callable[..., RegionOperationsTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionOperationsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -720,8 +735,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, operation]) if request is not None and has_flattened_params: raise ValueError( @@ -729,10 +744,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionOperationRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionOperationRequest): request = compute.DeleteRegionOperationRequest(request) # If we have keyword arguments corresponding to fields on the @@ -866,8 +879,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, operation]) if request is not None and has_flattened_params: raise ValueError( @@ -875,10 +888,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionOperationRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRegionOperationRequest): request = compute.GetRegionOperationRequest(request) # If we have keyword arguments corresponding to fields on the @@ -992,8 +1003,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1001,10 +1012,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionOperationsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ListRegionOperationsRequest): request = compute.ListRegionOperationsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1155,8 +1164,8 @@ def sample_wait(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, operation]) if request is not None and has_flattened_params: raise ValueError( @@ -1164,10 +1173,8 @@ def sample_wait(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.WaitRegionOperationRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.WaitRegionOperationRequest): request = compute.WaitRegionOperationRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py index 0dadccc25204..cab190f98688 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_security_policies/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -508,7 +509,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionSecurityPoliciesTransport]] = None, + transport: Optional[ + Union[ + str, + RegionSecurityPoliciesTransport, + Callable[..., RegionSecurityPoliciesTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -520,9 +527,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, RegionSecurityPoliciesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,RegionSecurityPoliciesTransport,Callable[..., RegionSecurityPoliciesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionSecurityPoliciesTransport constructor. + If set to None, a transport is chosen automatically. 
NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -634,8 +643,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionSecurityPoliciesTransport], + Callable[..., RegionSecurityPoliciesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionSecurityPoliciesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -733,8 +750,8 @@ def sample_add_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, security_policy, security_policy_rule_resource] ) @@ -744,10 +761,8 @@ def sample_add_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddRuleRegionSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddRuleRegionSecurityPolicyRequest): request = compute.AddRuleRegionSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -877,8 +892,8 @@ def sample_add_rule(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, security_policy, security_policy_rule_resource] ) @@ -888,10 +903,8 @@ def sample_add_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddRuleRegionSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddRuleRegionSecurityPolicyRequest): request = compute.AddRuleRegionSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1040,8 +1053,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, security_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1049,10 +1062,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteRegionSecurityPolicyRequest): request = compute.DeleteRegionSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1174,8 +1185,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, security_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1183,10 +1194,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionSecurityPolicyRequest): request = compute.DeleteRegionSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1334,8 +1343,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, security_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1343,10 +1352,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionSecurityPolicyRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRegionSecurityPolicyRequest): request = compute.GetRegionSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1470,8 +1477,8 @@ def sample_get_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, security_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1479,10 +1486,8 @@ def sample_get_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRuleRegionSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRuleRegionSecurityPolicyRequest): request = compute.GetRuleRegionSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1602,8 +1607,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, security_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1611,10 +1616,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionSecurityPolicyRequest): request = compute.InsertRegionSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1733,8 +1736,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, security_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1742,10 +1745,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionSecurityPolicyRequest): request = compute.InsertRegionSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1885,8 +1886,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1894,10 +1895,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionSecurityPoliciesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListRegionSecurityPoliciesRequest): request = compute.ListRegionSecurityPoliciesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2034,8 +2033,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, security_policy, security_policy_resource] ) @@ -2045,10 +2044,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRegionSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.PatchRegionSecurityPolicyRequest): request = compute.PatchRegionSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2181,8 +2178,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, security_policy, security_policy_resource] ) @@ -2192,10 +2189,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRegionSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRegionSecurityPolicyRequest): request = compute.PatchRegionSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2352,8 +2347,8 @@ def sample_patch_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, security_policy, security_policy_rule_resource] ) @@ -2363,10 +2358,8 @@ def sample_patch_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRuleRegionSecurityPolicyRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRuleRegionSecurityPolicyRequest): request = compute.PatchRuleRegionSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2498,8 +2491,8 @@ def sample_patch_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, security_policy, security_policy_rule_resource] ) @@ -2509,10 +2502,8 @@ def sample_patch_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRuleRegionSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRuleRegionSecurityPolicyRequest): request = compute.PatchRuleRegionSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2661,8 +2652,8 @@ def sample_remove_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, security_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -2670,10 +2661,8 @@ def sample_remove_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveRuleRegionSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.RemoveRuleRegionSecurityPolicyRequest): request = compute.RemoveRuleRegionSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2795,8 +2784,8 @@ def sample_remove_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, security_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -2804,10 +2793,8 @@ def sample_remove_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveRuleRegionSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.RemoveRuleRegionSecurityPolicyRequest): request = compute.RemoveRuleRegionSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py index 2b4ce6ad7a28..73c1c5aa9202 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_certificates/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -508,7 +509,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionSslCertificatesTransport]] = None, + transport: Optional[ + Union[ + str, + RegionSslCertificatesTransport, + Callable[..., RegionSslCertificatesTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -520,9 +527,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, RegionSslCertificatesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,RegionSslCertificatesTransport,Callable[..., RegionSslCertificatesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionSslCertificatesTransport constructor. + If set to None, a transport is chosen automatically. 
NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -634,8 +643,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionSslCertificatesTransport], + Callable[..., RegionSslCertificatesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionSslCertificatesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -728,8 +745,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, ssl_certificate]) if request is not None and has_flattened_params: raise ValueError( @@ -737,10 +754,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionSslCertificateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteRegionSslCertificateRequest): request = compute.DeleteRegionSslCertificateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -863,8 +878,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, ssl_certificate]) if request is not None and has_flattened_params: raise ValueError( @@ -872,10 +887,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionSslCertificateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionSslCertificateRequest): request = compute.DeleteRegionSslCertificateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1040,8 +1053,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, ssl_certificate]) if request is not None and has_flattened_params: raise ValueError( @@ -1049,10 +1062,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionSslCertificateRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRegionSslCertificateRequest): request = compute.GetRegionSslCertificateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1173,8 +1184,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, ssl_certificate_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1182,10 +1193,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionSslCertificateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionSslCertificateRequest): request = compute.InsertRegionSslCertificateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1305,8 +1314,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, ssl_certificate_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1314,10 +1323,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionSslCertificateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionSslCertificateRequest): request = compute.InsertRegionSslCertificateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1458,8 +1465,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1467,10 +1474,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionSslCertificatesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ListRegionSslCertificatesRequest): request = compute.ListRegionSslCertificatesRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/client.py index d74e2a459f27..98d8a1a56b9e 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_ssl_policies/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionSslPoliciesTransport]] = None, + transport: Optional[ + Union[ + str, + RegionSslPoliciesTransport, + Callable[..., RegionSslPoliciesTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +525,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, RegionSslPoliciesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,RegionSslPoliciesTransport,Callable[..., RegionSslPoliciesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionSslPoliciesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). 
We welcome your feedback via an issue in this library's source repository. @@ -632,8 +641,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionSslPoliciesTransport], + Callable[..., RegionSslPoliciesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionSslPoliciesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -726,8 +743,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, ssl_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -735,10 +752,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionSslPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionSslPolicyRequest): request = compute.DeleteRegionSslPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -861,8 +876,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, ssl_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -870,10 +885,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionSslPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionSslPolicyRequest): request = compute.DeleteRegionSslPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1025,8 +1038,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, ssl_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1034,10 +1047,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionSslPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.GetRegionSslPolicyRequest): request = compute.GetRegionSslPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1155,8 +1166,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, ssl_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1164,10 +1175,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionSslPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionSslPolicyRequest): request = compute.InsertRegionSslPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1284,8 +1293,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, ssl_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1293,10 +1302,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionSslPolicyRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionSslPolicyRequest): request = compute.InsertRegionSslPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1434,8 +1441,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1443,10 +1450,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionSslPoliciesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListRegionSslPoliciesRequest): request = compute.ListRegionSslPoliciesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1564,8 +1569,8 @@ def sample_list_available_features(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1573,10 +1578,8 @@ def sample_list_available_features(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListAvailableFeaturesRegionSslPoliciesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.ListAvailableFeaturesRegionSslPoliciesRequest ): @@ -1703,8 +1706,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, ssl_policy, ssl_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1712,10 +1715,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRegionSslPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRegionSslPolicyRequest): request = compute.PatchRegionSslPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1845,8 +1846,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, ssl_policy, ssl_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1854,10 +1855,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRegionSslPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRegionSslPolicyRequest): request = compute.PatchRegionSslPolicyRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py index 3e2e941cd579..52a4c4b44608 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_http_proxies/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -508,7 +509,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionTargetHttpProxiesTransport]] = None, + transport: Optional[ + Union[ + str, + RegionTargetHttpProxiesTransport, + Callable[..., RegionTargetHttpProxiesTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, 
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -520,9 +527,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, RegionTargetHttpProxiesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,RegionTargetHttpProxiesTransport,Callable[..., RegionTargetHttpProxiesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionTargetHttpProxiesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -636,8 +645,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionTargetHttpProxiesTransport], + Callable[..., RegionTargetHttpProxiesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionTargetHttpProxiesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -729,8 +746,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, target_http_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -738,10 +755,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionTargetHttpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionTargetHttpProxyRequest): request = compute.DeleteRegionTargetHttpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -863,8 +878,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, target_http_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -872,10 +887,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionTargetHttpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionTargetHttpProxyRequest): request = compute.DeleteRegionTargetHttpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1035,8 +1048,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, target_http_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -1044,10 +1057,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionTargetHttpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRegionTargetHttpProxyRequest): request = compute.GetRegionTargetHttpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1168,8 +1179,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, target_http_proxy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1177,10 +1188,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionTargetHttpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.InsertRegionTargetHttpProxyRequest): request = compute.InsertRegionTargetHttpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1300,8 +1309,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, target_http_proxy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1309,10 +1318,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionTargetHttpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionTargetHttpProxyRequest): request = compute.InsertRegionTargetHttpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1455,8 +1462,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1464,10 +1471,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionTargetHttpProxiesRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListRegionTargetHttpProxiesRequest): request = compute.ListRegionTargetHttpProxiesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1601,8 +1606,8 @@ def sample_set_url_map(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, target_http_proxy, url_map_reference_resource] ) @@ -1612,10 +1617,8 @@ def sample_set_url_map(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetUrlMapRegionTargetHttpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetUrlMapRegionTargetHttpProxyRequest): request = compute.SetUrlMapRegionTargetHttpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1745,8 +1748,8 @@ def sample_set_url_map(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, region, target_http_proxy, url_map_reference_resource] ) @@ -1756,10 +1759,8 @@ def sample_set_url_map(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetUrlMapRegionTargetHttpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetUrlMapRegionTargetHttpProxyRequest): request = compute.SetUrlMapRegionTargetHttpProxyRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py index f5408cf15b3d..d9ccba9782ec 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_https_proxies/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -508,7 +509,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionTargetHttpsProxiesTransport]] = None, + transport: Optional[ + Union[ + str, + RegionTargetHttpsProxiesTransport, + Callable[..., RegionTargetHttpsProxiesTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -520,9 +527,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. 
- transport (Union[str, RegionTargetHttpsProxiesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,RegionTargetHttpsProxiesTransport,Callable[..., RegionTargetHttpsProxiesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionTargetHttpsProxiesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -636,8 +645,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionTargetHttpsProxiesTransport], + Callable[..., RegionTargetHttpsProxiesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionTargetHttpsProxiesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -729,8 +746,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, target_https_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -738,10 +755,8 @@ def sample_delete(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionTargetHttpsProxyRequest): request = compute.DeleteRegionTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -863,8 +878,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, target_https_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -872,10 +887,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionTargetHttpsProxyRequest): request = compute.DeleteRegionTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1035,8 +1048,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, target_https_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -1044,10 +1057,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRegionTargetHttpsProxyRequest): request = compute.GetRegionTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1168,8 +1179,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, target_https_proxy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1177,10 +1188,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.InsertRegionTargetHttpsProxyRequest): request = compute.InsertRegionTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1300,8 +1309,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, target_https_proxy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1309,10 +1318,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionTargetHttpsProxyRequest): request = compute.InsertRegionTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1455,8 +1462,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1464,10 +1471,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionTargetHttpsProxiesRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListRegionTargetHttpsProxiesRequest): request = compute.ListRegionTargetHttpsProxiesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1602,8 +1607,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, target_https_proxy, target_https_proxy_resource] ) @@ -1613,10 +1618,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRegionTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRegionTargetHttpsProxyRequest): request = compute.PatchRegionTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1747,8 +1750,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, region, target_https_proxy, target_https_proxy_resource] ) @@ -1758,10 +1761,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRegionTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRegionTargetHttpsProxyRequest): request = compute.PatchRegionTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1918,8 +1919,8 @@ def sample_set_ssl_certificates(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1934,10 +1935,8 @@ def sample_set_ssl_certificates(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetSslCertificatesRegionTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.SetSslCertificatesRegionTargetHttpsProxyRequest ): @@ -2076,8 +2075,8 @@ def sample_set_ssl_certificates(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2092,10 +2091,8 @@ def sample_set_ssl_certificates(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetSslCertificatesRegionTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.SetSslCertificatesRegionTargetHttpsProxyRequest ): @@ -2257,8 +2254,8 @@ def sample_set_url_map(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, target_https_proxy, url_map_reference_resource] ) @@ -2268,10 +2265,8 @@ def sample_set_url_map(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetUrlMapRegionTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetUrlMapRegionTargetHttpsProxyRequest): request = compute.SetUrlMapRegionTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2401,8 +2396,8 @@ def sample_set_url_map(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, target_https_proxy, url_map_reference_resource] ) @@ -2412,10 +2407,8 @@ def sample_set_url_map(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetUrlMapRegionTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetUrlMapRegionTargetHttpsProxyRequest): request = compute.SetUrlMapRegionTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/client.py index df795a87695a..34e07a352e8f 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_target_tcp_proxies/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -508,7 +509,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionTargetTcpProxiesTransport]] = None, + transport: Optional[ + Union[ + str, + RegionTargetTcpProxiesTransport, + Callable[..., RegionTargetTcpProxiesTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: 
gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -520,9 +527,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, RegionTargetTcpProxiesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,RegionTargetTcpProxiesTransport,Callable[..., RegionTargetTcpProxiesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionTargetTcpProxiesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -634,8 +643,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionTargetTcpProxiesTransport], + Callable[..., RegionTargetTcpProxiesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionTargetTcpProxiesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -727,8 +744,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, target_tcp_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -736,10 +753,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionTargetTcpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionTargetTcpProxyRequest): request = compute.DeleteRegionTargetTcpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -861,8 +876,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, target_tcp_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -870,10 +885,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionTargetTcpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionTargetTcpProxyRequest): request = compute.DeleteRegionTargetTcpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1024,8 +1037,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, target_tcp_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -1033,10 +1046,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionTargetTcpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRegionTargetTcpProxyRequest): request = compute.GetRegionTargetTcpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1157,8 +1168,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, target_tcp_proxy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1166,10 +1177,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionTargetTcpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.InsertRegionTargetTcpProxyRequest): request = compute.InsertRegionTargetTcpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1289,8 +1298,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, target_tcp_proxy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1298,10 +1307,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionTargetTcpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionTargetTcpProxyRequest): request = compute.InsertRegionTargetTcpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1443,8 +1450,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1452,10 +1459,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionTargetTcpProxiesRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListRegionTargetTcpProxiesRequest): request = compute.ListRegionTargetTcpProxiesRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py index d423b52411d7..e675fca3c996 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_url_maps/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionUrlMapsTransport]] = None, + transport: Optional[ + Union[str, RegionUrlMapsTransport, Callable[..., RegionUrlMapsTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, RegionUrlMapsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,RegionUrlMapsTransport,Callable[..., RegionUrlMapsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. 
+ If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionUrlMapsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -630,8 +635,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionUrlMapsTransport], Callable[..., RegionUrlMapsTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionUrlMapsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -721,8 +733,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, url_map]) if request is not None and has_flattened_params: raise ValueError( @@ -730,10 +742,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionUrlMapRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteRegionUrlMapRequest): request = compute.DeleteRegionUrlMapRequest(request) # If we have keyword arguments corresponding to fields on the @@ -853,8 +863,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, url_map]) if request is not None and has_flattened_params: raise ValueError( @@ -862,10 +872,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRegionUrlMapRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRegionUrlMapRequest): request = compute.DeleteRegionUrlMapRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1033,8 +1041,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, url_map]) if request is not None and has_flattened_params: raise ValueError( @@ -1042,10 +1050,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionUrlMapRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRegionUrlMapRequest): request = compute.GetRegionUrlMapRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1163,8 +1169,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, url_map_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1172,10 +1178,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionUrlMapRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionUrlMapRequest): request = compute.InsertRegionUrlMapRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1292,8 +1296,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, url_map_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1301,10 +1305,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRegionUrlMapRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRegionUrlMapRequest): request = compute.InsertRegionUrlMapRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1444,8 +1446,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1453,10 +1455,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionUrlMapsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListRegionUrlMapsRequest): request = compute.ListRegionUrlMapsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1589,8 +1589,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, url_map, url_map_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1598,10 +1598,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRegionUrlMapRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRegionUrlMapRequest): request = compute.PatchRegionUrlMapRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1730,8 +1728,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, url_map, url_map_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1739,10 +1737,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRegionUrlMapRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.PatchRegionUrlMapRequest): request = compute.PatchRegionUrlMapRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1896,8 +1892,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, url_map, url_map_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1905,10 +1901,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateRegionUrlMapRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateRegionUrlMapRequest): request = compute.UpdateRegionUrlMapRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2037,8 +2031,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, url_map, url_map_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2046,10 +2040,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateRegionUrlMapRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateRegionUrlMapRequest): request = compute.UpdateRegionUrlMapRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2204,8 +2196,8 @@ def sample_validate(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, url_map, region_url_maps_validate_request_resource] ) @@ -2215,10 +2207,8 @@ def sample_validate(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ValidateRegionUrlMapRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ValidateRegionUrlMapRequest): request = compute.ValidateRegionUrlMapRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/client.py index 0aa78a18f0ad..b16b2f5573b4 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_zones/client.py @@ -17,6 +17,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -501,7 +502,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionZonesTransport]] = None, + transport: Optional[ + Union[str, RegionZonesTransport, Callable[..., RegionZonesTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -513,9 +516,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, RegionZonesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,RegionZonesTransport,Callable[..., RegionZonesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionZonesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. 
@@ -624,8 +629,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionZonesTransport], Callable[..., RegionZonesTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionZonesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -709,8 +721,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -718,10 +730,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionZonesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ListRegionZonesRequest): request = compute.ListRegionZonesRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/client.py index 159e90598d5b..1a9d893d3c45 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/regions/client.py @@ -17,6 +17,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -501,7 +502,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RegionsTransport]] = None, + transport: Optional[ + Union[str, RegionsTransport, Callable[..., RegionsTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -513,9 +516,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, RegionsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,RegionsTransport,Callable[..., RegionsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. 
@@ -624,8 +629,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RegionsTransport], Callable[..., RegionsTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -715,8 +727,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -724,10 +736,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRegionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRegionRequest): request = compute.GetRegionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -837,8 +847,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -846,10 +856,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRegionsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListRegionsRequest): request = compute.ListRegionsRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py index 5b2dfba5fc83..381ab5f9db54 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/reservations/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ReservationsTransport]] = None, + transport: Optional[ + Union[str, ReservationsTransport, Callable[..., ReservationsTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ReservationsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. 
+ transport (Optional[Union[str,ReservationsTransport,Callable[..., ReservationsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ReservationsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -627,8 +632,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[ReservationsTransport], Callable[..., ReservationsTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ReservationsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -708,8 +720,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -717,10 +729,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListReservationsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListReservationsRequest): request = compute.AggregatedListReservationsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -835,8 +845,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, reservation]) if request is not None and has_flattened_params: raise ValueError( @@ -844,10 +854,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteReservationRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteReservationRequest): request = compute.DeleteReservationRequest(request) # If we have keyword arguments corresponding to fields on the @@ -963,8 +971,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, reservation]) if request is not None and has_flattened_params: raise ValueError( @@ -972,10 +980,8 @@ def sample_delete(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteReservationRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteReservationRequest): request = compute.DeleteReservationRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1121,8 +1127,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, reservation]) if request is not None and has_flattened_params: raise ValueError( @@ -1130,10 +1136,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetReservationRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetReservationRequest): request = compute.GetReservationRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1274,8 +1278,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, zone, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1283,10 +1287,8 @@ def sample_get_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetIamPolicyReservationRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetIamPolicyReservationRequest): request = compute.GetIamPolicyReservationRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1402,8 +1404,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, reservation_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1411,10 +1413,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertReservationRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertReservationRequest): request = compute.InsertReservationRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1529,8 +1529,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, reservation_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1538,10 +1538,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertReservationRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertReservationRequest): request = compute.InsertReservationRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1677,8 +1675,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone]) if request is not None and has_flattened_params: raise ValueError( @@ -1686,10 +1684,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListReservationsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ListReservationsRequest): request = compute.ListReservationsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1821,8 +1817,8 @@ def sample_resize(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, reservation, reservations_resize_request_resource] ) @@ -1832,10 +1828,8 @@ def sample_resize(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ResizeReservationRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ResizeReservationRequest): request = compute.ResizeReservationRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1965,8 +1959,8 @@ def sample_resize(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, reservation, reservations_resize_request_resource] ) @@ -1976,10 +1970,8 @@ def sample_resize(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ResizeReservationRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ResizeReservationRequest): request = compute.ResizeReservationRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2155,8 +2147,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, resource, zone_set_policy_request_resource] ) @@ -2166,10 +2158,8 @@ def sample_set_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetIamPolicyReservationRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetIamPolicyReservationRequest): request = compute.SetIamPolicyReservationRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2302,8 +2292,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, resource, test_permissions_request_resource] ) @@ -2313,10 +2303,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsReservationRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.TestIamPermissionsReservationRequest): request = compute.TestIamPermissionsReservationRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2442,8 +2430,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, reservation, reservation_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2451,10 +2439,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateReservationRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateReservationRequest): request = compute.UpdateReservationRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2578,8 +2564,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, reservation, reservation_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2587,10 +2573,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateReservationRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateReservationRequest): request = compute.UpdateReservationRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py index d22b666c474b..f03455d73cba 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/resource_policies/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ResourcePoliciesTransport]] = None, + transport: Optional[ + Union[ + str, ResourcePoliciesTransport, Callable[..., ResourcePoliciesTransport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +523,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client 
will attempt to ascertain the credentials from the environment. - transport (Union[str, ResourcePoliciesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,ResourcePoliciesTransport,Callable[..., ResourcePoliciesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ResourcePoliciesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -632,8 +639,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[ResourcePoliciesTransport], + Callable[..., ResourcePoliciesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ResourcePoliciesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -713,8 +728,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -722,10 +737,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListResourcePoliciesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListResourcePoliciesRequest): request = compute.AggregatedListResourcePoliciesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -842,8 +855,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, resource_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -851,10 +864,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteResourcePolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteResourcePolicyRequest): request = compute.DeleteResourcePolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -972,8 +983,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, resource_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -981,10 +992,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteResourcePolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteResourcePolicyRequest): request = compute.DeleteResourcePolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1132,8 +1141,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, resource_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1141,10 +1150,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetResourcePolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetResourcePolicyRequest): request = compute.GetResourcePolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1287,8 +1294,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1296,10 +1303,8 @@ def sample_get_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetIamPolicyResourcePolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetIamPolicyResourcePolicyRequest): request = compute.GetIamPolicyResourcePolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1414,8 +1419,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, resource_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1423,10 +1428,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertResourcePolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.InsertResourcePolicyRequest): request = compute.InsertResourcePolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1540,8 +1543,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, resource_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1549,10 +1552,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertResourcePolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertResourcePolicyRequest): request = compute.InsertResourcePolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1689,8 +1690,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1698,10 +1699,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListResourcePoliciesRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListResourcePoliciesRequest): request = compute.ListResourcePoliciesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1829,8 +1828,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource_policy, resource_policy_resource] ) @@ -1840,10 +1839,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchResourcePolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchResourcePolicyRequest): request = compute.PatchResourcePolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1967,8 +1964,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource_policy, resource_policy_resource] ) @@ -1978,10 +1975,8 @@ def sample_patch(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchResourcePolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchResourcePolicyRequest): request = compute.PatchResourcePolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2159,8 +2154,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, region_set_policy_request_resource] ) @@ -2170,10 +2165,8 @@ def sample_set_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetIamPolicyResourcePolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetIamPolicyResourcePolicyRequest): request = compute.SetIamPolicyResourcePolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2306,8 +2299,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, region, resource, test_permissions_request_resource] ) @@ -2317,10 +2310,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsResourcePolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.TestIamPermissionsResourcePolicyRequest): request = compute.TestIamPermissionsResourcePolicyRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py index efd9fe6738bf..0b9708b3ebce 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/routers/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RoutersTransport]] = None, + transport: Optional[ + Union[str, RoutersTransport, Callable[..., RoutersTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, RoutersTransport]): The - transport to use. If set to None, a transport is chosen - automatically. 
+ transport (Optional[Union[str,RoutersTransport,Callable[..., RoutersTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RoutersTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -627,8 +632,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RoutersTransport], Callable[..., RoutersTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RoutersTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -706,8 +718,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -715,10 +727,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListRoutersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListRoutersRequest): request = compute.AggregatedListRoutersRequest(request) # If we have keyword arguments corresponding to fields on the @@ -834,8 +844,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, router]) if request is not None and has_flattened_params: raise ValueError( @@ -843,10 +853,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRouterRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRouterRequest): request = compute.DeleteRouterRequest(request) # If we have keyword arguments corresponding to fields on the @@ -963,8 +971,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, router]) if request is not None and has_flattened_params: raise ValueError( @@ -972,10 +980,8 @@ def sample_delete(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRouterRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRouterRequest): request = compute.DeleteRouterRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1118,8 +1124,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, router]) if request is not None and has_flattened_params: raise ValueError( @@ -1127,10 +1133,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRouterRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRouterRequest): request = compute.GetRouterRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1247,8 +1251,8 @@ def sample_get_nat_ip_info(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, router]) if request is not None and has_flattened_params: raise ValueError( @@ -1256,10 +1260,8 @@ def sample_get_nat_ip_info(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetNatIpInfoRouterRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetNatIpInfoRouterRequest): request = compute.GetNatIpInfoRouterRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1383,8 +1385,8 @@ def sample_get_nat_mapping_info(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, router]) if request is not None and has_flattened_params: raise ValueError( @@ -1392,10 +1394,8 @@ def sample_get_nat_mapping_info(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetNatMappingInfoRoutersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetNatMappingInfoRoutersRequest): request = compute.GetNatMappingInfoRoutersRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1519,8 +1519,8 @@ def sample_get_router_status(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, router]) if request is not None and has_flattened_params: raise ValueError( @@ -1528,10 +1528,8 @@ def sample_get_router_status(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRouterStatusRouterRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRouterStatusRouterRequest): request = compute.GetRouterStatusRouterRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1646,8 +1644,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, router_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1655,10 +1653,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRouterRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.InsertRouterRequest): request = compute.InsertRouterRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1772,8 +1768,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, router_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1781,10 +1777,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRouterRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRouterRequest): request = compute.InsertRouterRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1921,8 +1915,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1930,10 +1924,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRoutersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListRoutersRequest): request = compute.ListRoutersRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2063,8 +2055,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, router, router_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2072,10 +2064,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRouterRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRouterRequest): request = compute.PatchRouterRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2201,8 +2191,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, router, router_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2210,10 +2200,8 @@ def sample_patch(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRouterRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRouterRequest): request = compute.PatchRouterRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2362,8 +2350,8 @@ def sample_preview(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, router, router_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2371,10 +2359,8 @@ def sample_preview(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PreviewRouterRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PreviewRouterRequest): request = compute.PreviewRouterRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2504,8 +2490,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, router, router_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2513,10 +2499,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateRouterRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateRouterRequest): request = compute.UpdateRouterRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2646,8 +2630,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, router, router_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2655,10 +2639,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateRouterRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.UpdateRouterRequest): request = compute.UpdateRouterRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/client.py index 3cc8c3a3dfe4..f6d3706b681a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/routes/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, RoutesTransport]] = None, + transport: Optional[ + Union[str, RoutesTransport, Callable[..., RoutesTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, RoutesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,RoutesTransport,Callable[..., RoutesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RoutesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. 
@@ -627,8 +632,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[RoutesTransport], Callable[..., RoutesTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RoutesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -706,8 +718,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, route]) if request is not None and has_flattened_params: raise ValueError( @@ -715,10 +727,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRouteRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRouteRequest): request = compute.DeleteRouteRequest(request) # If we have keyword arguments corresponding to fields on the @@ -823,8 +833,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, route]) if request is not None and has_flattened_params: raise ValueError( @@ -832,10 +842,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteRouteRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteRouteRequest): request = compute.DeleteRouteRequest(request) # If we have keyword arguments corresponding to fields on the @@ -968,8 +976,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, route]) if request is not None and has_flattened_params: raise ValueError( @@ -977,10 +985,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRouteRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRouteRequest): request = compute.GetRouteRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1085,8 +1091,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, route_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1094,10 +1100,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRouteRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRouteRequest): request = compute.InsertRouteRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1197,8 +1201,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, route_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1206,10 +1210,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertRouteRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertRouteRequest): request = compute.InsertRouteRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1331,8 +1333,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1340,10 +1342,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListRoutesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListRoutesRequest): request = compute.ListRoutesRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/client.py index 452d2b3edef6..e60ce05278c2 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/security_policies/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, SecurityPoliciesTransport]] = None, + transport: Optional[ + Union[ + str, SecurityPoliciesTransport, Callable[..., SecurityPoliciesTransport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +523,11 @@ def __init__( credentials identify the 
application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, SecurityPoliciesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,SecurityPoliciesTransport,Callable[..., SecurityPoliciesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the SecurityPoliciesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -632,8 +639,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[SecurityPoliciesTransport], + Callable[..., SecurityPoliciesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., SecurityPoliciesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -720,8 +735,8 @@ def sample_add_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, security_policy, security_policy_rule_resource] ) @@ -731,10 +746,8 @@ def sample_add_rule(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddRuleSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddRuleSecurityPolicyRequest): request = compute.AddRuleSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -850,8 +863,8 @@ def sample_add_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, security_policy, security_policy_rule_resource] ) @@ -861,10 +874,8 @@ def sample_add_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddRuleSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddRuleSecurityPolicyRequest): request = compute.AddRuleSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -998,8 +1009,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1007,10 +1018,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListSecurityPoliciesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListSecurityPoliciesRequest): request = compute.AggregatedListSecurityPoliciesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1120,8 +1129,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, security_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1129,10 +1138,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteSecurityPolicyRequest): request = compute.DeleteSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1240,8 +1247,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, security_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1249,10 +1256,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteSecurityPolicyRequest): request = compute.DeleteSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1387,8 +1392,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, security_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1396,10 +1401,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.GetSecurityPolicyRequest): request = compute.GetSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1509,8 +1512,8 @@ def sample_get_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, security_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1518,10 +1521,8 @@ def sample_get_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetRuleSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetRuleSecurityPolicyRequest): request = compute.GetRuleSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1627,8 +1628,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, security_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1636,10 +1637,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertSecurityPolicyRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertSecurityPolicyRequest): request = compute.InsertSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1740,8 +1739,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, security_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1749,10 +1748,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertSecurityPolicyRequest): request = compute.InsertSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1873,8 +1870,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1882,10 +1879,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListSecurityPoliciesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListSecurityPoliciesRequest): request = compute.ListSecurityPoliciesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1987,8 +1982,8 @@ def sample_list_preconfigured_expression_sets(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1996,10 +1991,8 @@ def sample_list_preconfigured_expression_sets(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance( request, compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest ): @@ -2117,8 +2110,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, security_policy, security_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2126,10 +2119,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchSecurityPolicyRequest): request = compute.PatchSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2250,8 +2241,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, security_policy, security_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2259,10 +2250,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.PatchSecurityPolicyRequest): request = compute.PatchSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2404,8 +2393,8 @@ def sample_patch_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, security_policy, security_policy_rule_resource] ) @@ -2415,10 +2404,8 @@ def sample_patch_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRuleSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRuleSecurityPolicyRequest): request = compute.PatchRuleSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2536,8 +2523,8 @@ def sample_patch_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, security_policy, security_policy_rule_resource] ) @@ -2547,10 +2534,8 @@ def sample_patch_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchRuleSecurityPolicyRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchRuleSecurityPolicyRequest): request = compute.PatchRuleSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2684,8 +2669,8 @@ def sample_remove_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, security_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -2693,10 +2678,8 @@ def sample_remove_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveRuleSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.RemoveRuleSecurityPolicyRequest): request = compute.RemoveRuleSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2804,8 +2787,8 @@ def sample_remove_rule(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, security_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -2813,10 +2796,8 @@ def sample_remove_rule(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveRuleSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.RemoveRuleSecurityPolicyRequest): request = compute.RemoveRuleSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2957,8 +2938,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, global_set_labels_request_resource] ) @@ -2968,10 +2949,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsSecurityPolicyRequest): request = compute.SetLabelsSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3092,8 +3071,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, global_set_labels_request_resource] ) @@ -3103,10 +3082,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsSecurityPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsSecurityPolicyRequest): request = compute.SetLabelsSecurityPolicyRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py index c30223c5deb6..ac8bdf6dc782 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/service_attachments/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ServiceAttachmentsTransport]] = None, + transport: Optional[ + Union[ + str, + ServiceAttachmentsTransport, + Callable[..., ServiceAttachmentsTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +525,11 
@@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ServiceAttachmentsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,ServiceAttachmentsTransport,Callable[..., ServiceAttachmentsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ServiceAttachmentsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -632,8 +641,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[ServiceAttachmentsTransport], + Callable[..., ServiceAttachmentsTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ServiceAttachmentsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -716,8 +733,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -725,10 +742,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListServiceAttachmentsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListServiceAttachmentsRequest): request = compute.AggregatedListServiceAttachmentsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -846,8 +861,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, service_attachment]) if request is not None and has_flattened_params: raise ValueError( @@ -855,10 +870,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteServiceAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteServiceAttachmentRequest): request = compute.DeleteServiceAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -977,8 +990,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, service_attachment]) if request is not None and has_flattened_params: raise ValueError( @@ -986,10 +999,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteServiceAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteServiceAttachmentRequest): request = compute.DeleteServiceAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1139,8 +1150,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, service_attachment]) if request is not None and has_flattened_params: raise ValueError( @@ -1148,10 +1159,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetServiceAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.GetServiceAttachmentRequest): request = compute.GetServiceAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1294,8 +1303,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1303,10 +1312,8 @@ def sample_get_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetIamPolicyServiceAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetIamPolicyServiceAttachmentRequest): request = compute.GetIamPolicyServiceAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1423,8 +1430,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, service_attachment_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1432,10 +1439,8 @@ def sample_insert(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertServiceAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertServiceAttachmentRequest): request = compute.InsertServiceAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1551,8 +1556,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, service_attachment_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1560,10 +1565,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertServiceAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertServiceAttachmentRequest): request = compute.InsertServiceAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1699,8 +1702,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1708,10 +1711,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListServiceAttachmentsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListServiceAttachmentsRequest): request = compute.ListServiceAttachmentsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1848,8 +1849,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, service_attachment, service_attachment_resource] ) @@ -1859,10 +1860,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchServiceAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.PatchServiceAttachmentRequest): request = compute.PatchServiceAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1995,8 +1994,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, service_attachment, service_attachment_resource] ) @@ -2006,10 +2005,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchServiceAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchServiceAttachmentRequest): request = compute.PatchServiceAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2187,8 +2184,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, region_set_policy_request_resource] ) @@ -2198,10 +2195,8 @@ def sample_set_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetIamPolicyServiceAttachmentRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetIamPolicyServiceAttachmentRequest): request = compute.SetIamPolicyServiceAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2334,8 +2329,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, test_permissions_request_resource] ) @@ -2345,10 +2340,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsServiceAttachmentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.TestIamPermissionsServiceAttachmentRequest): request = compute.TestIamPermissionsServiceAttachmentRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py index 0bc8709781a9..a25ad4685a7a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshot_settings_service/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -507,7 +508,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, SnapshotSettingsServiceTransport]] = None, + transport: Optional[ + Union[ + str, + SnapshotSettingsServiceTransport, + Callable[..., SnapshotSettingsServiceTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -519,9 +526,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, SnapshotSettingsServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,SnapshotSettingsServiceTransport,Callable[..., SnapshotSettingsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the SnapshotSettingsServiceTransport constructor. + If set to None, a transport is chosen automatically. 
NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -635,8 +644,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[SnapshotSettingsServiceTransport], + Callable[..., SnapshotSettingsServiceTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., SnapshotSettingsServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -706,8 +723,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -715,10 +732,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetSnapshotSettingRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetSnapshotSettingRequest): request = compute.GetSnapshotSettingRequest(request) # If we have keyword arguments corresponding to fields on the @@ -816,8 +831,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, snapshot_settings_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -825,10 +840,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchSnapshotSettingRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchSnapshotSettingRequest): request = compute.PatchSnapshotSettingRequest(request) # If we have keyword arguments corresponding to fields on the @@ -928,8 +941,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, snapshot_settings_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -937,10 +950,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchSnapshotSettingRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.PatchSnapshotSettingRequest): request = compute.PatchSnapshotSettingRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py index 8aee7895af3a..4ce07009bfff 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, SnapshotsTransport]] = None, + transport: Optional[ + Union[str, SnapshotsTransport, Callable[..., SnapshotsTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, SnapshotsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,SnapshotsTransport,Callable[..., SnapshotsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the SnapshotsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. 
@@ -627,8 +632,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[SnapshotsTransport], Callable[..., SnapshotsTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., SnapshotsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -715,8 +727,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, snapshot]) if request is not None and has_flattened_params: raise ValueError( @@ -724,10 +736,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteSnapshotRequest): request = compute.DeleteSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -841,8 +851,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, snapshot]) if request is not None and has_flattened_params: raise ValueError( @@ -850,10 +860,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteSnapshotRequest): request = compute.DeleteSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -987,8 +995,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, snapshot]) if request is not None and has_flattened_params: raise ValueError( @@ -996,10 +1004,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetSnapshotRequest): request = compute.GetSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1128,8 +1134,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1137,10 +1143,8 @@ def sample_get_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetIamPolicySnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetIamPolicySnapshotRequest): request = compute.GetIamPolicySnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1250,8 +1254,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, snapshot_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1259,10 +1263,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.InsertSnapshotRequest): request = compute.InsertSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1367,8 +1369,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, snapshot_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1376,10 +1378,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertSnapshotRequest): request = compute.InsertSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1501,8 +1501,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1510,10 +1510,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListSnapshotsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListSnapshotsRequest): request = compute.ListSnapshotsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1652,8 +1650,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, global_set_policy_request_resource] ) @@ -1663,10 +1661,8 @@ def sample_set_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetIamPolicySnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetIamPolicySnapshotRequest): request = compute.SetIamPolicySnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1787,8 +1783,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, global_set_labels_request_resource] ) @@ -1798,10 +1794,8 @@ def sample_set_labels(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsSnapshotRequest): request = compute.SetLabelsSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1922,8 +1916,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, resource, global_set_labels_request_resource] ) @@ -1933,10 +1927,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsSnapshotRequest): request = compute.SetLabelsSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2081,8 +2073,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, resource, test_permissions_request_resource] ) @@ -2092,10 +2084,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.TestIamPermissionsSnapshotRequest): request = compute.TestIamPermissionsSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py index c238790c21ff..274c7f2dcc4d 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_certificates/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, SslCertificatesTransport]] = None, + transport: Optional[ + Union[ + str, SslCertificatesTransport, Callable[..., SslCertificatesTransport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +523,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, SslCertificatesTransport]): The - transport to use. 
If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,SslCertificatesTransport,Callable[..., SslCertificatesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the SslCertificatesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -632,8 +639,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[SslCertificatesTransport], Callable[..., SslCertificatesTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., SslCertificatesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -714,8 +728,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -723,10 +737,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListSslCertificatesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListSslCertificatesRequest): request = compute.AggregatedListSslCertificatesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -836,8 +848,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, ssl_certificate]) if request is not None and has_flattened_params: raise ValueError( @@ -845,10 +857,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteSslCertificateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteSslCertificateRequest): request = compute.DeleteSslCertificateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -956,8 +966,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, ssl_certificate]) if request is not None and has_flattened_params: raise ValueError( @@ -965,10 +975,8 @@ def sample_delete(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteSslCertificateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteSslCertificateRequest): request = compute.DeleteSslCertificateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1118,8 +1126,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, ssl_certificate]) if request is not None and has_flattened_params: raise ValueError( @@ -1127,10 +1135,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetSslCertificateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetSslCertificateRequest): request = compute.GetSslCertificateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1236,8 +1242,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, ssl_certificate_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1245,10 +1251,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertSslCertificateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertSslCertificateRequest): request = compute.InsertSslCertificateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1349,8 +1353,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, ssl_certificate_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1358,10 +1362,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertSslCertificateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertSslCertificateRequest): request = compute.InsertSslCertificateRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1484,8 +1486,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1493,10 +1495,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListSslCertificatesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListSslCertificatesRequest): request = compute.ListSslCertificatesRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/client.py index 7b8759d368b4..4280d5003d6b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/ssl_policies/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, SslPoliciesTransport]] = None, + transport: Optional[ + Union[str, SslPoliciesTransport, Callable[..., SslPoliciesTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the 
service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, SslPoliciesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,SslPoliciesTransport,Callable[..., SslPoliciesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the SslPoliciesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -627,8 +632,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[SslPoliciesTransport], Callable[..., SslPoliciesTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., SslPoliciesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -707,8 +719,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -716,10 +728,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListSslPoliciesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListSslPoliciesRequest): request = compute.AggregatedListSslPoliciesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -832,8 +842,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, ssl_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -841,10 +851,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteSslPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteSslPolicyRequest): request = compute.DeleteSslPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -955,8 +963,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, ssl_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -964,10 +972,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteSslPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteSslPolicyRequest): request = compute.DeleteSslPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1106,8 +1112,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, ssl_policy]) if request is not None and has_flattened_params: raise ValueError( @@ -1115,10 +1121,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetSslPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetSslPolicyRequest): request = compute.GetSslPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1223,8 +1227,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, ssl_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1232,10 +1236,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertSslPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertSslPolicyRequest): request = compute.InsertSslPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1335,8 +1337,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, ssl_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1344,10 +1346,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertSslPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.InsertSslPolicyRequest): request = compute.InsertSslPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1468,8 +1468,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1477,10 +1477,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListSslPoliciesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListSslPoliciesRequest): request = compute.ListSslPoliciesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1582,8 +1580,8 @@ def sample_list_available_features(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1591,10 +1589,8 @@ def sample_list_available_features(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListAvailableFeaturesSslPoliciesRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListAvailableFeaturesSslPoliciesRequest): request = compute.ListAvailableFeaturesSslPoliciesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1703,8 +1699,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, ssl_policy, ssl_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1712,10 +1708,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchSslPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchSslPolicyRequest): request = compute.PatchSslPolicyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1833,8 +1827,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, ssl_policy, ssl_policy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1842,10 +1836,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchSslPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchSslPolicyRequest): request = compute.PatchSslPolicyRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/__init__.py new file mode 100644 index 000000000000..3c75542da924 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import StoragePoolTypesClient + +__all__ = ("StoragePoolTypesClient",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/client.py new file mode 100644 index 000000000000..aebf59fa880d --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/client.py @@ -0,0 +1,1056 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.cloud.compute_v1.services.storage_pool_types import pagers +from google.cloud.compute_v1.types import compute + +from .transports.base import DEFAULT_CLIENT_INFO, StoragePoolTypesTransport +from .transports.rest import StoragePoolTypesRestTransport + + +class StoragePoolTypesClientMeta(type): + """Metaclass for the StoragePoolTypes client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[StoragePoolTypesTransport]] + _transport_registry["rest"] = StoragePoolTypesRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[StoragePoolTypesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. 
+ + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class StoragePoolTypesClient(metaclass=StoragePoolTypesClientMeta): + """The StoragePoolTypes API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "compute.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. 
+ + Returns: + StoragePoolTypesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + StoragePoolTypesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> StoragePoolTypesTransport: + """Returns the transport used by the client instance. + + Returns: + StoragePoolTypesTransport: The transport used by the client + instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` if provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn(
+            "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". 
+ + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = StoragePoolTypesClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = StoragePoolTypesClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = StoragePoolTypesClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = StoragePoolTypesClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. 
+ + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = StoragePoolTypesClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or StoragePoolTypesClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, StoragePoolTypesTransport, Callable[..., StoragePoolTypesTransport] + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the storage pool types client. 
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,StoragePoolTypesTransport,Callable[..., StoragePoolTypesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the StoragePoolTypesTransport constructor. + If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. 
The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = StoragePoolTypesClient._read_environment_variables() + self._client_cert_source = StoragePoolTypesClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = StoragePoolTypesClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. 
+ # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, StoragePoolTypesTransport) + if transport_provided: + # transport is a StoragePoolTypesTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(StoragePoolTypesTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or StoragePoolTypesClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[StoragePoolTypesTransport], + Callable[..., StoragePoolTypesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., StoragePoolTypesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def aggregated_list( + self, + request: Optional[ + 
Union[compute.AggregatedListStoragePoolTypesRequest, dict] + ] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of storage pool types. To prevent + failure, Google recommends that you set the + ``returnPartialSuccess`` parameter to ``true``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.StoragePoolTypesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListStoragePoolTypesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListStoragePoolTypesRequest, dict]): + The request object. A request message for + StoragePoolTypes.AggregatedList. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.compute_v1.services.storage_pool_types.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.AggregatedListStoragePoolTypesRequest): + request = compute.AggregatedListStoragePoolTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get( + self, + request: Optional[Union[compute.GetStoragePoolTypeRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + storage_pool_type: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.StoragePoolType: + r"""Returns the specified storage pool type. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.StoragePoolTypesClient() + + # Initialize request argument(s) + request = compute_v1.GetStoragePoolTypeRequest( + project="project_value", + storage_pool_type="storage_pool_type_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetStoragePoolTypeRequest, dict]): + The request object. A request message for + StoragePoolTypes.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + storage_pool_type (str): + Name of the storage pool type to + return. 
+ + This corresponds to the ``storage_pool_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.StoragePoolType: + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, storage_pool_type]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.GetStoragePoolTypeRequest): + request = compute.GetStoragePoolTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if storage_pool_type is not None: + request.storage_pool_type = storage_pool_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("storage_pool_type", request.storage_pool_type), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list( + self, + request: Optional[Union[compute.ListStoragePoolTypesRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of storage pool types available to + the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.StoragePoolTypesClient() + + # Initialize request argument(s) + request = compute_v1.ListStoragePoolTypesRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListStoragePoolTypesRequest, dict]): + The request object. A request message for + StoragePoolTypes.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.storage_pool_types.pagers.ListPager: + Contains a list of storage pool + types. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.ListStoragePoolTypesRequest): + request = compute.ListStoragePoolTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "StoragePoolTypesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("StoragePoolTypesClient",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/pagers.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/pagers.py new file mode 100644 index 000000000000..122627627cd2 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pool_types/pagers.py @@ -0,0 +1,154 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
from typing import (
    Any,
    AsyncIterator,
    Awaitable,
    Callable,
    Iterator,
    Optional,
    Sequence,
    Tuple,
)

from google.cloud.compute_v1.types import compute


class AggregatedListPager:
    """A pager over ``aggregated_list`` responses.

    Thinly wraps an initial
    :class:`google.cloud.compute_v1.types.StoragePoolTypeAggregatedList`
    and exposes ``__iter__``, which walks the ``items`` map and
    transparently issues further ``AggregatedList`` requests for as long
    as a ``next_page_token`` is present.

    Unknown attribute lookups fall through to the most recently fetched
    response, so the usual
    :class:`google.cloud.compute_v1.types.StoragePoolTypeAggregatedList`
    attributes remain available on the pager itself.
    """

    def __init__(
        self,
        method: Callable[..., compute.StoragePoolTypeAggregatedList],
        request: compute.AggregatedListStoragePoolTypesRequest,
        response: compute.StoragePoolTypeAggregatedList,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The RPC method that produced ``response``;
                re-invoked to fetch subsequent pages.
            request (google.cloud.compute_v1.types.AggregatedListStoragePoolTypesRequest):
                The initial request object.
            response (google.cloud.compute_v1.types.StoragePoolTypeAggregatedList):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so that page-token mutation during iteration
        # never touches the caller's object.
        self._request = compute.AggregatedListStoragePoolTypesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[compute.StoragePoolTypeAggregatedList]:
        """Lazily yield each response page, fetching follow-ups on demand."""
        while True:
            yield self._response
            token = self._response.next_page_token
            if not token:
                return
            self._request.page_token = token
            self._response = self._method(self._request, metadata=self._metadata)

    def __iter__(self) -> Iterator[Tuple[str, compute.StoragePoolTypesScopedList]]:
        for page in self.pages:
            for scoped_pair in page.items.items():
                yield scoped_pair

    def get(self, key: str) -> Optional[compute.StoragePoolTypesScopedList]:
        """Return the scoped list for ``key`` from the current page, if any."""
        return self._response.items.get(key)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"


class ListPager:
    """A pager over ``list`` responses.

    Thinly wraps an initial
    :class:`google.cloud.compute_v1.types.StoragePoolTypeList` and
    exposes ``__iter__``, which walks the ``items`` field and
    transparently issues further ``List`` requests for as long as a
    ``next_page_token`` is present.

    Unknown attribute lookups fall through to the most recently fetched
    response, so the usual
    :class:`google.cloud.compute_v1.types.StoragePoolTypeList`
    attributes remain available on the pager itself.
    """

    def __init__(
        self,
        method: Callable[..., compute.StoragePoolTypeList],
        request: compute.ListStoragePoolTypesRequest,
        response: compute.StoragePoolTypeList,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The RPC method that produced ``response``;
                re-invoked to fetch subsequent pages.
            request (google.cloud.compute_v1.types.ListStoragePoolTypesRequest):
                The initial request object.
            response (google.cloud.compute_v1.types.StoragePoolTypeList):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so that page-token mutation during iteration
        # never touches the caller's object.
        self._request = compute.ListStoragePoolTypesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[compute.StoragePoolTypeList]:
        """Lazily yield each response page, fetching follow-ups on demand."""
        while True:
            yield self._response
            token = self._response.next_page_token
            if not token:
                return
            self._request.page_token = token
            self._response = self._method(self._request, metadata=self._metadata)

    def __iter__(self) -> Iterator[compute.StoragePoolType]:
        for page in self.pages:
            for item in page.items:
                yield item

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type

from .base import StoragePoolTypesTransport
from .rest import StoragePoolTypesRestInterceptor, StoragePoolTypesRestTransport

# Registry mapping transport labels to their implementations. REST is the
# only transport generated for this Compute API surface.
_transport_registry: Dict[str, Type[StoragePoolTypesTransport]] = OrderedDict()
_transport_registry["rest"] = StoragePoolTypesRestTransport

__all__ = (
    "StoragePoolTypesTransport",
    "StoragePoolTypesRestTransport",
    "StoragePoolTypesRestInterceptor",
)
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union

import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
import google.auth  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.compute_v1 import gapic_version as package_version
from google.cloud.compute_v1.types import compute

DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=package_version.__version__
)


class StoragePoolTypesTransport(abc.ABC):
    """Abstract transport class for StoragePoolTypes.

    Concrete transports (e.g. REST) subclass this and implement the
    ``aggregated_list``, ``get`` and ``list`` method properties.
    """

    AUTH_SCOPES = (
        "https://www.googleapis.com/auth/compute.readonly",
        "https://www.googleapis.com/auth/compute",
        "https://www.googleapis.com/auth/cloud-platform",
    )

    DEFAULT_HOST: str = "compute.googleapis.com"

    def __init__(
        self,
        *,
        host: str = DEFAULT_HOST,
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
        **kwargs,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to (default: 'compute.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]):
                Authorization credentials to attach to requests. If none are
                given, the transport falls back to Application Default
                Credentials. Mutually exclusive with ``credentials_file``.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for
                billing and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT
                should be used for service account credentials.
        """

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes

        # Explicit credentials and a credentials file cannot both be supplied.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs(
                "'credentials_file' and 'credentials' are mutually exclusive"
            )

        # Resolve credentials: explicit file, then explicit object, then
        # Application Default Credentials.
        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
            )
        elif credentials is None:
            credentials, _ = google.auth.default(
                **scopes_kwargs, quota_project_id=quota_project_id
            )
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(
                    api_audience if api_audience else host
                )

        # Service account credentials prefer self-signed JWTs when the
        # installed google-auth supports them and the caller opted in.
        if (
            always_use_jwt_access
            and isinstance(credentials, service_account.Credentials)
            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
        ):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Default to port 443 (HTTPS) when no port is specified.
        if ":" not in host:
            host += ":443"
        self._host = host

    @property
    def host(self):
        """The fully resolved ``host:port`` this transport talks to."""
        return self._host

    def _prep_wrapped_messages(self, client_info):
        # Wrap each RPC once so retry/timeout/client-info defaults are
        # applied uniformly across all methods.
        self._wrapped_methods = {
            rpc: gapic_v1.method.wrap_method(
                rpc,
                default_timeout=None,
                client_info=client_info,
            )
            for rpc in (self.aggregated_list, self.get, self.list)
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
            Only call this method if the transport is NOT shared
            with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def aggregated_list(
        self,
    ) -> Callable[
        [compute.AggregatedListStoragePoolTypesRequest],
        Union[
            compute.StoragePoolTypeAggregatedList,
            Awaitable[compute.StoragePoolTypeAggregatedList],
        ],
    ]:
        raise NotImplementedError()

    @property
    def get(
        self,
    ) -> Callable[
        [compute.GetStoragePoolTypeRequest],
        Union[compute.StoragePoolType, Awaitable[compute.StoragePoolType]],
    ]:
        raise NotImplementedError()

    @property
    def list(
        self,
    ) -> Callable[
        [compute.ListStoragePoolTypesRequest],
        Union[compute.StoragePoolTypeList, Awaitable[compute.StoragePoolTypeList]],
    ]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        raise NotImplementedError()


__all__ = ("StoragePoolTypesTransport",)
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import dataclasses
import json  # type: ignore
import re
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
import warnings

from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.auth.transport.requests import AuthorizedSession  # type: ignore
from google.protobuf import json_format
import grpc  # type: ignore
from requests import __version__ as requests_version

try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore


from google.cloud.compute_v1.types import compute

from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
from .base import StoragePoolTypesTransport

DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
    grpc_version=None,
    rest_version=requests_version,
)


class StoragePoolTypesRestInterceptor:
    """Interceptor for StoragePoolTypes.

    Interceptors are used to manipulate requests, request metadata, and responses
    in arbitrary ways.
    Example use cases include:
    * Logging
    * Verifying requests according to service or custom semantics
    * Stripping extraneous information from responses

    These use cases and more can be enabled by injecting an
    instance of a custom subclass when constructing the StoragePoolTypesRestTransport.

    .. code-block:: python
        class MyCustomStoragePoolTypesInterceptor(StoragePoolTypesRestInterceptor):
            def pre_aggregated_list(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_aggregated_list(self, response):
                logging.log(f"Received response: {response}")
                return response

            def pre_get(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_get(self, response):
                logging.log(f"Received response: {response}")
                return response

            def pre_list(self, request, metadata):
                logging.log(f"Received request: {request}")
                return request, metadata

            def post_list(self, response):
                logging.log(f"Received response: {response}")
                return response

        transport = StoragePoolTypesRestTransport(interceptor=MyCustomStoragePoolTypesInterceptor())
        client = StoragePoolTypesClient(transport=transport)


    """

    def pre_aggregated_list(
        self,
        request: compute.AggregatedListStoragePoolTypesRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[
        compute.AggregatedListStoragePoolTypesRequest, Sequence[Tuple[str, str]]
    ]:
        """Pre-rpc interceptor for aggregated_list

        Override in a subclass to manipulate the request or metadata
        before they are sent to the StoragePoolTypes server.
        """
        return request, metadata

    def post_aggregated_list(
        self, response: compute.StoragePoolTypeAggregatedList
    ) -> compute.StoragePoolTypeAggregatedList:
        """Post-rpc interceptor for aggregated_list

        Override in a subclass to manipulate the response
        after it is returned by the StoragePoolTypes server but before
        it is returned to user code.
        """
        return response

    def pre_get(
        self,
        request: compute.GetStoragePoolTypeRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[compute.GetStoragePoolTypeRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for get

        Override in a subclass to manipulate the request or metadata
        before they are sent to the StoragePoolTypes server.
        """
        return request, metadata

    def post_get(self, response: compute.StoragePoolType) -> compute.StoragePoolType:
        """Post-rpc interceptor for get

        Override in a subclass to manipulate the response
        after it is returned by the StoragePoolTypes server but before
        it is returned to user code.
        """
        return response

    def pre_list(
        self,
        request: compute.ListStoragePoolTypesRequest,
        metadata: Sequence[Tuple[str, str]],
    ) -> Tuple[compute.ListStoragePoolTypesRequest, Sequence[Tuple[str, str]]]:
        """Pre-rpc interceptor for list

        Override in a subclass to manipulate the request or metadata
        before they are sent to the StoragePoolTypes server.
        """
        return request, metadata

    def post_list(
        self, response: compute.StoragePoolTypeList
    ) -> compute.StoragePoolTypeList:
        """Post-rpc interceptor for list

        Override in a subclass to manipulate the response
        after it is returned by the StoragePoolTypes server but before
        it is returned to user code.
        """
        return response


@dataclasses.dataclass
class StoragePoolTypesRestStub:
    # Shared state handed to each per-RPC callable below.
    _session: AuthorizedSession
    _host: str
    _interceptor: StoragePoolTypesRestInterceptor


class StoragePoolTypesRestTransport(StoragePoolTypesTransport):
    """REST backend transport for StoragePoolTypes.

    The StoragePoolTypes API.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends JSON representations of protocol buffers over HTTP/1.1

    NOTE: This REST transport functionality is currently in a beta
    state (preview). We welcome your feedback via an issue in this
    library's source repository. Thank you!
    """

    def __init__(
        self,
        *,
        host: str = "compute.googleapis.com",
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        url_scheme: str = "https",
        interceptor: Optional[StoragePoolTypesRestInterceptor] = None,
        api_audience: Optional[str] = None,
    ) -> None:
        """Instantiate the transport.

        NOTE: This REST transport functionality is currently in a beta
        state (preview). We welcome your feedback via a GitHub issue in
        this library's repository. Thank you!

        Args:
            host (Optional[str]):
                The hostname to connect to (default: 'compute.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional(Sequence[str])): A list of scopes. This argument is
                ignored if ``channel`` is provided.
            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
                certificate to configure mutual TLS HTTP channel. It is ignored
                if ``channel`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you are developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            url_scheme: the protocol scheme for the API endpoint.  Normally
                "https", but for testing or local servers,
                "http" can be specified.
        """
        # Run the base constructor
        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
        # credentials object
        # FIX: restored the named groups (?P<scheme>...) / (?P<host>...)
        # which had been stripped from the pattern; without them the regex
        # is a syntax error (`re.error`) and `groupdict()["scheme"]` below
        # could never work.
        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
        if maybe_url_match is None:
            raise ValueError(
                f"Unexpected hostname structure: {host}"
            )  # pragma: NO COVER

        url_match_items = maybe_url_match.groupdict()

        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host

        super().__init__(
            host=host,
            credentials=credentials,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
            api_audience=api_audience,
        )
        self._session = AuthorizedSession(
            self._credentials, default_host=self.DEFAULT_HOST
        )
        if client_cert_source_for_mtls:
            self._session.configure_mtls_channel(client_cert_source_for_mtls)
        self._interceptor = interceptor or StoragePoolTypesRestInterceptor()
        self._prep_wrapped_messages(client_info)

    class _AggregatedList(StoragePoolTypesRestStub):
        def __hash__(self):
            return hash("AggregatedList")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {
                k: v
                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
                if k not in message_dict
            }

        def __call__(
            self,
            request: compute.AggregatedListStoragePoolTypesRequest,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Optional[float] = None,
            metadata: Sequence[Tuple[str, str]] = (),
        ) -> compute.StoragePoolTypeAggregatedList:
            r"""Call the aggregated list method over HTTP.

            Args:
                request (~.compute.AggregatedListStoragePoolTypesRequest):
                    The request object. A request message for
                    StoragePoolTypes.AggregatedList. See the
                    method description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.StoragePoolTypeAggregatedList:

            """

            http_options: List[Dict[str, str]] = [
                {
                    "method": "get",
                    "uri": "/compute/v1/projects/{project}/aggregated/storagePoolTypes",
                },
            ]
            request, metadata = self._interceptor.pre_aggregated_list(request, metadata)
            pb_request = compute.AggregatedListStoragePoolTypesRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request["uri"]
            method = transcoded_request["method"]

            # Jsonify the query params
            query_params = json.loads(
                json_format.MessageToJson(
                    transcoded_request["query_params"],
                    use_integers_for_enums=False,
                )
            )
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers["Content-Type"] = "application/json"
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.StoragePoolTypeAggregatedList()
            pb_resp = compute.StoragePoolTypeAggregatedList.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_aggregated_list(resp)
            return resp

    class _Get(StoragePoolTypesRestStub):
        def __hash__(self):
            return hash("Get")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {
                k: v
                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
                if k not in message_dict
            }

        def __call__(
            self,
            request: compute.GetStoragePoolTypeRequest,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Optional[float] = None,
            metadata: Sequence[Tuple[str, str]] = (),
        ) -> compute.StoragePoolType:
            r"""Call the get method over HTTP.

            Args:
                request (~.compute.GetStoragePoolTypeRequest):
                    The request object. A request message for
                    StoragePoolTypes.Get. See the method
                    description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.StoragePoolType:

            """

            http_options: List[Dict[str, str]] = [
                {
                    "method": "get",
                    "uri": "/compute/v1/projects/{project}/zones/{zone}/storagePoolTypes/{storage_pool_type}",
                },
            ]
            request, metadata = self._interceptor.pre_get(request, metadata)
            pb_request = compute.GetStoragePoolTypeRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request["uri"]
            method = transcoded_request["method"]

            # Jsonify the query params
            query_params = json.loads(
                json_format.MessageToJson(
                    transcoded_request["query_params"],
                    use_integers_for_enums=False,
                )
            )
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers["Content-Type"] = "application/json"
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.StoragePoolType()
            pb_resp = compute.StoragePoolType.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_get(resp)
            return resp

    class _List(StoragePoolTypesRestStub):
        def __hash__(self):
            return hash("List")

        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}

        @classmethod
        def _get_unset_required_fields(cls, message_dict):
            return {
                k: v
                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
                if k not in message_dict
            }

        def __call__(
            self,
            request: compute.ListStoragePoolTypesRequest,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Optional[float] = None,
            metadata: Sequence[Tuple[str, str]] = (),
        ) -> compute.StoragePoolTypeList:
            r"""Call the list method over HTTP.

            Args:
                request (~.compute.ListStoragePoolTypesRequest):
                    The request object. A request message for
                    StoragePoolTypes.List. See the method
                    description for details.
                retry (google.api_core.retry.Retry): Designation of what errors, if any,
                    should be retried.
                timeout (float): The timeout for this request.
                metadata (Sequence[Tuple[str, str]]): Strings which should be
                    sent along with the request as metadata.

            Returns:
                ~.compute.StoragePoolTypeList:
                    Contains a list of storage pool
                types.

            """

            http_options: List[Dict[str, str]] = [
                {
                    "method": "get",
                    "uri": "/compute/v1/projects/{project}/zones/{zone}/storagePoolTypes",
                },
            ]
            request, metadata = self._interceptor.pre_list(request, metadata)
            pb_request = compute.ListStoragePoolTypesRequest.pb(request)
            transcoded_request = path_template.transcode(http_options, pb_request)

            uri = transcoded_request["uri"]
            method = transcoded_request["method"]

            # Jsonify the query params
            query_params = json.loads(
                json_format.MessageToJson(
                    transcoded_request["query_params"],
                    use_integers_for_enums=False,
                )
            )
            query_params.update(self._get_unset_required_fields(query_params))

            # Send the request
            headers = dict(metadata)
            headers["Content-Type"] = "application/json"
            response = getattr(self._session, method)(
                "{host}{uri}".format(host=self._host, uri=uri),
                timeout=timeout,
                headers=headers,
                params=rest_helpers.flatten_query_params(query_params, strict=True),
            )

            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
            # subclass.
            if response.status_code >= 400:
                raise core_exceptions.from_http_response(response)

            # Return the response
            resp = compute.StoragePoolTypeList()
            pb_resp = compute.StoragePoolTypeList.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
            resp = self._interceptor.post_list(resp)
            return resp

    @property
    def aggregated_list(
        self,
    ) -> Callable[
        [compute.AggregatedListStoragePoolTypesRequest],
        compute.StoragePoolTypeAggregatedList,
    ]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._AggregatedList(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def get(
        self,
    ) -> Callable[[compute.GetStoragePoolTypeRequest], compute.StoragePoolType]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._Get(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def list(
        self,
    ) -> Callable[[compute.ListStoragePoolTypesRequest], compute.StoragePoolTypeList]:
        # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
        # In C++ this would require a dynamic_cast
        return self._List(self._session, self._host, self._interceptor)  # type: ignore

    @property
    def kind(self) -> str:
        return "rest"

    def close(self):
        self._session.close()


__all__ = ("StoragePoolTypesRestTransport",)
+# +from .client import StoragePoolsClient + +__all__ = ("StoragePoolsClient",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/client.py new file mode 100644 index 000000000000..8fec2f8579ff --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/client.py @@ -0,0 +1,2527 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation, gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +from google.api_core import extended_operation # type: ignore + +from google.cloud.compute_v1.services.storage_pools import pagers +from google.cloud.compute_v1.types import compute + +from .transports.base import DEFAULT_CLIENT_INFO, StoragePoolsTransport +from .transports.rest import StoragePoolsRestTransport + + +class StoragePoolsClientMeta(type): + """Metaclass for the StoragePools client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[StoragePoolsTransport]] + _transport_registry["rest"] = StoragePoolsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[StoragePoolsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class StoragePoolsClient(metaclass=StoragePoolsClientMeta): + """The StoragePools API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "compute.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + StoragePoolsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + StoragePoolsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> StoragePoolsTransport: + """Returns the transport used by the client instance. + + Returns: + StoragePoolsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse an organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, +
) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated.
Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". 
+ + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = StoragePoolsClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = StoragePoolsClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = StoragePoolsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = StoragePoolsClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. 
+ + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = StoragePoolsClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or StoragePoolsClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, StoragePoolsTransport, Callable[..., StoragePoolsTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the storage pools client. 
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,StoragePoolsTransport,Callable[..., StoragePoolsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the StoragePoolsTransport constructor. + If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. 
The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = StoragePoolsClient._read_environment_variables() + self._client_cert_source = StoragePoolsClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = StoragePoolsClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. 
+ transport_provided = isinstance(transport, StoragePoolsTransport) + if transport_provided: + # transport is a StoragePoolsTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(StoragePoolsTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or StoragePoolsClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[StoragePoolsTransport], Callable[..., StoragePoolsTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., StoragePoolsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + def aggregated_list( + self, + request: Optional[ + Union[compute.AggregatedListStoragePoolsRequest, dict] + ] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = 
gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves an aggregated list of storage pools. To prevent + failure, Google recommends that you set the + ``returnPartialSuccess`` parameter to ``true``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.StoragePoolsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListStoragePoolsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListStoragePoolsRequest, dict]): + The request object. A request message for + StoragePools.AggregatedList. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.storage_pools.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.AggregatedListStoragePoolsRequest): + request = compute.AggregatedListStoragePoolsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AggregatedListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_unary( + self, + request: Optional[Union[compute.DeleteStoragePoolRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + storage_pool: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Deletes the specified storage pool. Deleting a + storagePool removes its data permanently and is + irreversible. However, deleting a storagePool does not + delete any snapshots previously made from the + storagePool. You must separately delete snapshots. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.StoragePoolsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteStoragePoolRequest( + project="project_value", + storage_pool="storage_pool_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteStoragePoolRequest, dict]): + The request object. A request message for + StoragePools.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + storage_pool (str): + Name of the storage pool to delete. + This corresponds to the ``storage_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, storage_pool]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.DeleteStoragePoolRequest): + request = compute.DeleteStoragePoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if storage_pool is not None: + request.storage_pool = storage_pool + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("storage_pool", request.storage_pool), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete( + self, + request: Optional[Union[compute.DeleteStoragePoolRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + storage_pool: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified storage pool. Deleting a + storagePool removes its data permanently and is + irreversible. However, deleting a storagePool does not + delete any snapshots previously made from the + storagePool. You must separately delete snapshots. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.StoragePoolsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteStoragePoolRequest( + project="project_value", + storage_pool="storage_pool_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteStoragePoolRequest, dict]): + The request object. A request message for + StoragePools.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + storage_pool (str): + Name of the storage pool to delete. + This corresponds to the ``storage_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project, zone, storage_pool]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.DeleteStoragePoolRequest): + request = compute.DeleteStoragePoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if storage_pool is not None: + request.storage_pool = storage_pool + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("storage_pool", request.storage_pool), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get( + self, + request: Optional[Union[compute.GetStoragePoolRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + storage_pool: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.StoragePool: + r"""Returns a specified storage pool. Gets a list of + available storage pools by making a list() request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.StoragePoolsClient() + + # Initialize request argument(s) + request = compute_v1.GetStoragePoolRequest( + project="project_value", + storage_pool="storage_pool_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetStoragePoolRequest, dict]): + The request object. A request message for + StoragePools.Get. 
See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + storage_pool (str): + Name of the storage pool to return. + This corresponds to the ``storage_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.StoragePool: + Represents a zonal storage pool + resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, storage_pool]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.GetStoragePoolRequest): + request = compute.GetStoragePoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if storage_pool is not None: + request.storage_pool = storage_pool + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("storage_pool", request.storage_pool), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: Optional[Union[compute.GetIamPolicyStoragePoolRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Gets the access control policy for a resource. May be + empty if no such policy or resource exists. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_iam_policy(): + # Create a client + client = compute_v1.StoragePoolsClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyStoragePoolRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetIamPolicyStoragePoolRequest, dict]): + The request object. A request message for + StoragePools.GetIamPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. 
+ Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + For a description of IAM and its features, see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.GetIamPolicyStoragePoolRequest): + request = compute.GetIamPolicyStoragePoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("resource", request.resource), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert_unary( + self, + request: Optional[Union[compute.InsertStoragePoolRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + storage_pool_resource: Optional[compute.StoragePool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Creates a storage pool in the specified project using + the data in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.StoragePoolsClient() + + # Initialize request argument(s) + request = compute_v1.InsertStoragePoolRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertStoragePoolRequest, dict]): + The request object. A request message for + StoragePools.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ storage_pool_resource (google.cloud.compute_v1.types.StoragePool): + The body resource for this request + This corresponds to the ``storage_pool_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, storage_pool_resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.InsertStoragePoolRequest): + request = compute.InsertStoragePoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if storage_pool_resource is not None: + request.storage_pool_resource = storage_pool_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert( + self, + request: Optional[Union[compute.InsertStoragePoolRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + storage_pool_resource: Optional[compute.StoragePool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a storage pool in the specified project using + the data in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.StoragePoolsClient() + + # Initialize request argument(s) + request = compute_v1.InsertStoragePoolRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertStoragePoolRequest, dict]): + The request object. A request message for + StoragePools.Insert. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + storage_pool_resource (google.cloud.compute_v1.types.StoragePool): + The body resource for this request + This corresponds to the ``storage_pool_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, storage_pool_resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.InsertStoragePoolRequest): + request = compute.InsertStoragePoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if storage_pool_resource is not None: + request.storage_pool_resource = storage_pool_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def list( + self, + request: Optional[Union[compute.ListStoragePoolsRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPager: + r"""Retrieves a list of storage pools contained within + the specified zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.StoragePoolsClient() + + # Initialize request argument(s) + request = compute_v1.ListStoragePoolsRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListStoragePoolsRequest, dict]): + The request object. A request message for + StoragePools.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.storage_pools.pagers.ListPager: + A list of StoragePool resources. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.ListStoragePoolsRequest): + request = compute.ListStoragePoolsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_disks( + self, + request: Optional[Union[compute.ListDisksStoragePoolsRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + storage_pool: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDisksPager: + r"""Lists the disks in a specified storage pool. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list_disks(): + # Create a client + client = compute_v1.StoragePoolsClient() + + # Initialize request argument(s) + request = compute_v1.ListDisksStoragePoolsRequest( + project="project_value", + storage_pool="storage_pool_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list_disks(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListDisksStoragePoolsRequest, dict]): + The request object. A request message for + StoragePools.ListDisks. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + storage_pool (str): + Name of the storage pool to list + disks of. + + This corresponds to the ``storage_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.services.storage_pools.pagers.ListDisksPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, storage_pool]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.ListDisksStoragePoolsRequest): + request = compute.ListDisksStoragePoolsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if storage_pool is not None: + request.storage_pool = storage_pool + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_disks] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("storage_pool", request.storage_pool), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDisksPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_iam_policy( + self, + request: Optional[Union[compute.SetIamPolicyStoragePoolRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + resource: Optional[str] = None, + zone_set_policy_request_resource: Optional[compute.ZoneSetPolicyRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.StoragePoolsClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyStoragePoolRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyStoragePoolRequest, dict]): + The request object. A request message for + StoragePools.SetIamPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone_set_policy_request_resource (google.cloud.compute_v1.types.ZoneSetPolicyRequest): + The body resource for this request + This corresponds to the ``zone_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + Policy is a collection of bindings. A binding binds one + or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role is + a named list of permissions; each role can be an IAM + predefined role or a user-created custom role. For some + types of Google Cloud resources, a binding can also + specify a condition, which is a logical expression that + allows access to a resource only if the expression + evaluates to true. A condition can add constraints based + on attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the [IAM + documentation](\ https://cloud.google.com/iam/help/conditions/resource-policies). + **JSON example:** + :literal:`\` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + **YAML example:** + :literal:`\` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= 
version: 3`\ \` + For a description of IAM and its features, see the [IAM + documentation](\ https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project, zone, resource, zone_set_policy_request_resource] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.SetIamPolicyStoragePoolRequest): + request = compute.SetIamPolicyStoragePoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if resource is not None: + request.resource = resource + if zone_set_policy_request_resource is not None: + request.zone_set_policy_request_resource = ( + zone_set_policy_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("resource", request.resource), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def test_iam_permissions( + self, + request: Optional[ + Union[compute.TestIamPermissionsStoragePoolRequest, dict] + ] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[ + compute.TestPermissionsRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.StoragePoolsClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsStoragePoolRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsStoragePoolRequest, dict]): + The request object. A request message for + StoragePools.TestIamPermissions. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [project, zone, resource, test_permissions_request_resource] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.TestIamPermissionsStoragePoolRequest): + request = compute.TestIamPermissionsStoragePoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = ( + test_permissions_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("resource", request.resource), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_unary( + self, + request: Optional[Union[compute.UpdateStoragePoolRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + storage_pool: Optional[str] = None, + storage_pool_resource: Optional[compute.StoragePool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Updates the specified storagePool with the data included in the + request. The update is performed only on selected fields + included as part of update-mask. Only the following fields can + be modified: size_tb and provisioned_iops. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.StoragePoolsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateStoragePoolRequest( + project="project_value", + storage_pool="storage_pool_value", + zone="zone_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateStoragePoolRequest, dict]): + The request object. A request message for + StoragePools.Update. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + storage_pool (str): + The storagePool name for this + request. + + This corresponds to the ``storage_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + storage_pool_resource (google.cloud.compute_v1.types.StoragePool): + The body resource for this request + This corresponds to the ``storage_pool_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, storage_pool, storage_pool_resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.UpdateStoragePoolRequest): + request = compute.UpdateStoragePoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if storage_pool is not None: + request.storage_pool = storage_pool + if storage_pool_resource is not None: + request.storage_pool_resource = storage_pool_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("storage_pool", request.storage_pool), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update( + self, + request: Optional[Union[compute.UpdateStoragePoolRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + storage_pool: Optional[str] = None, + storage_pool_resource: Optional[compute.StoragePool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Updates the specified storagePool with the data included in the + request. The update is performed only on selected fields + included as part of update-mask. Only the following fields can + be modified: size_tb and provisioned_iops. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.StoragePoolsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateStoragePoolRequest( + project="project_value", + storage_pool="storage_pool_value", + zone="zone_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateStoragePoolRequest, dict]): + The request object. A request message for + StoragePools.Update. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + storage_pool (str): + The storagePool name for this + request. + + This corresponds to the ``storage_pool`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + storage_pool_resource (google.cloud.compute_v1.types.StoragePool): + The body resource for this request + This corresponds to the ``storage_pool_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project, zone, storage_pool, storage_pool_resource]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.UpdateStoragePoolRequest): + request = compute.UpdateStoragePoolRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if storage_pool is not None: + request.storage_pool = storage_pool + if storage_pool_resource is not None: + request.storage_pool_resource = storage_pool_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("storage_pool", request.storage_pool), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "StoragePoolsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +__all__ = ("StoragePoolsClient",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/pagers.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/pagers.py new file mode 100644 index 000000000000..1e8895e13b8b --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/pagers.py @@ -0,0 +1,216 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, +) + +from google.cloud.compute_v1.types import compute + + +class AggregatedListPager: + """A pager for iterating through ``aggregated_list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.StoragePoolAggregatedList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedList`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.StoragePoolAggregatedList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., compute.StoragePoolAggregatedList], + request: compute.AggregatedListStoragePoolsRequest, + response: compute.StoragePoolAggregatedList, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListStoragePoolsRequest): + The initial request object. + response (google.cloud.compute_v1.types.StoragePoolAggregatedList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = compute.AggregatedListStoragePoolsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.StoragePoolAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.StoragePoolsScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.StoragePoolsScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.StoragePoolList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.StoragePoolList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., compute.StoragePoolList], + request: compute.ListStoragePoolsRequest, + response: compute.StoragePoolList, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.compute_v1.types.ListStoragePoolsRequest): + The initial request object. + response (google.cloud.compute_v1.types.StoragePoolList): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListStoragePoolsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.StoragePoolList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.StoragePool]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDisksPager: + """A pager for iterating through ``list_disks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.StoragePoolListDisks` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDisks`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.StoragePoolListDisks` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., compute.StoragePoolListDisks], + request: compute.ListDisksStoragePoolsRequest, + response: compute.StoragePoolListDisks, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListDisksStoragePoolsRequest): + The initial request object. + response (google.cloud.compute_v1.types.StoragePoolListDisks): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = compute.ListDisksStoragePoolsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.StoragePoolListDisks]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[compute.StoragePoolDisk]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/transports/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/transports/__init__.py new file mode 100644 index 000000000000..3c79a1aae6ee --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/transports/__init__.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import StoragePoolsTransport +from .rest import StoragePoolsRestInterceptor, StoragePoolsRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[StoragePoolsTransport]] +_transport_registry["rest"] = StoragePoolsRestTransport + +__all__ = ( + "StoragePoolsTransport", + "StoragePoolsRestTransport", + "StoragePoolsRestInterceptor", +) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/transports/base.py new file mode 100644 index 000000000000..2c2ed56927f4 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/transports/base.py @@ -0,0 +1,307 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1 import gapic_version as package_version +from google.cloud.compute_v1.services import zone_operations +from google.cloud.compute_v1.types import compute + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + + +class StoragePoolsTransport(abc.ABC): + """Abstract transport class for StoragePools.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ) + + DEFAULT_HOST: str = "compute.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + self._extended_operations_services: Dict[str, Any] = {} + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_timeout=None, + client_info=client_info, + ), + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=None, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=None, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_timeout=None, + client_info=client_info, + ), + self.list_disks: gapic_v1.method.wrap_method( + self.list_disks, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.update: gapic_v1.method.wrap_method( + self.update, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListStoragePoolsRequest], + Union[ + compute.StoragePoolAggregatedList, + Awaitable[compute.StoragePoolAggregatedList], + ], + ]: + raise NotImplementedError() + + @property + def delete( + self, + ) -> Callable[ + [compute.DeleteStoragePoolRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def get( + self, + ) -> Callable[ + [compute.GetStoragePoolRequest], + Union[compute.StoragePool, Awaitable[compute.StoragePool]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [compute.GetIamPolicyStoragePoolRequest], + Union[compute.Policy, Awaitable[compute.Policy]], + ]: + raise NotImplementedError() + + @property + def insert( + self, + ) -> Callable[ + [compute.InsertStoragePoolRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def list( + self, + ) -> Callable[ + [compute.ListStoragePoolsRequest], + Union[compute.StoragePoolList, Awaitable[compute.StoragePoolList]], + ]: + raise NotImplementedError() + + @property + def list_disks( + self, + ) -> Callable[ + [compute.ListDisksStoragePoolsRequest], + Union[compute.StoragePoolListDisks, Awaitable[compute.StoragePoolListDisks]], + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [compute.SetIamPolicyStoragePoolRequest], + Union[compute.Policy, Awaitable[compute.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsStoragePoolRequest], + Union[ + compute.TestPermissionsResponse, Awaitable[compute.TestPermissionsResponse] + ], + ]: + raise NotImplementedError() + + @property + def update( + self, + ) -> Callable[ + [compute.UpdateStoragePoolRequest], + Union[compute.Operation, 
Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _zone_operations_client(self) -> zone_operations.ZoneOperationsClient: + ex_op_service = self._extended_operations_services.get("zone_operations") + if not ex_op_service: + ex_op_service = zone_operations.ZoneOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["zone_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ("StoragePoolsTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/transports/rest.py new file mode 100644 index 000000000000..bb7af8c338e4 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/storage_pools/transports/rest.py @@ -0,0 +1,1535 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + + +from google.cloud.compute_v1.types import compute + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import StoragePoolsTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class StoragePoolsRestInterceptor: + """Interceptor for StoragePools. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the StoragePoolsRestTransport. + + .. 
code-block:: python + class MyCustomStoragePoolsInterceptor(StoragePoolsRestInterceptor): + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_disks(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_disks(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: 
{request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update(self, response): + logging.log(f"Received response: {response}") + return response + + transport = StoragePoolsRestTransport(interceptor=MyCustomStoragePoolsInterceptor()) + client = StoragePoolsClient(transport=transport) + + + """ + + def pre_aggregated_list( + self, + request: compute.AggregatedListStoragePoolsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.AggregatedListStoragePoolsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the StoragePools server. + """ + return request, metadata + + def post_aggregated_list( + self, response: compute.StoragePoolAggregatedList + ) -> compute.StoragePoolAggregatedList: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the response + after it is returned by the StoragePools server but before + it is returned to user code. + """ + return response + + def pre_delete( + self, + request: compute.DeleteStoragePoolRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.DeleteStoragePoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the StoragePools server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + Override in a subclass to manipulate the response + after it is returned by the StoragePools server but before + it is returned to user code. 
+ """ + return response + + def pre_get( + self, + request: compute.GetStoragePoolRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.GetStoragePoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the StoragePools server. + """ + return request, metadata + + def post_get(self, response: compute.StoragePool) -> compute.StoragePool: + """Post-rpc interceptor for get + + Override in a subclass to manipulate the response + after it is returned by the StoragePools server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: compute.GetIamPolicyStoragePoolRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.GetIamPolicyStoragePoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the StoragePools server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the StoragePools server but before + it is returned to user code. + """ + return response + + def pre_insert( + self, + request: compute.InsertStoragePoolRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.InsertStoragePoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the StoragePools server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + Override in a subclass to manipulate the response + after it is returned by the StoragePools server but before + it is returned to user code. 
+ """ + return response + + def pre_list( + self, + request: compute.ListStoragePoolsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.ListStoragePoolsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the StoragePools server. + """ + return request, metadata + + def post_list(self, response: compute.StoragePoolList) -> compute.StoragePoolList: + """Post-rpc interceptor for list + + Override in a subclass to manipulate the response + after it is returned by the StoragePools server but before + it is returned to user code. + """ + return response + + def pre_list_disks( + self, + request: compute.ListDisksStoragePoolsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.ListDisksStoragePoolsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_disks + + Override in a subclass to manipulate the request or metadata + before they are sent to the StoragePools server. + """ + return request, metadata + + def post_list_disks( + self, response: compute.StoragePoolListDisks + ) -> compute.StoragePoolListDisks: + """Post-rpc interceptor for list_disks + + Override in a subclass to manipulate the response + after it is returned by the StoragePools server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: compute.SetIamPolicyStoragePoolRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.SetIamPolicyStoragePoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the StoragePools server. 
+ """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the StoragePools server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: compute.TestIamPermissionsStoragePoolRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.TestIamPermissionsStoragePoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the StoragePools server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: compute.TestPermissionsResponse + ) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the StoragePools server but before + it is returned to user code. + """ + return response + + def pre_update( + self, + request: compute.UpdateStoragePoolRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[compute.UpdateStoragePoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update + + Override in a subclass to manipulate the request or metadata + before they are sent to the StoragePools server. + """ + return request, metadata + + def post_update(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update + + Override in a subclass to manipulate the response + after it is returned by the StoragePools server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class StoragePoolsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: StoragePoolsRestInterceptor + + +class StoragePoolsRestTransport(StoragePoolsTransport): + """REST backend transport for StoragePools. + + The StoragePools API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__( + self, + *, + host: str = "compute.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[StoragePoolsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or StoragePoolsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregatedList(StoragePoolsRestStub): + def __hash__(self): + return hash("AggregatedList") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.AggregatedListStoragePoolsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.StoragePoolAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListStoragePoolsRequest): + The request object. A request message for + StoragePools.AggregatedList. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.StoragePoolAggregatedList: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/storagePools", + }, + ] + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + pb_request = compute.AggregatedListStoragePoolsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.StoragePoolAggregatedList() + pb_resp = compute.StoragePoolAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list(resp) + return resp + + class _Delete(StoragePoolsRestStub): + def __hash__(self): + return hash("Delete") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.DeleteStoragePoolRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteStoragePoolRequest): + The request object. A request message for + StoragePools.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zoneOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ Note that completed Operation resources have a limited + retention period. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/zones/{zone}/storagePools/{storage_pool}", + }, + ] + request, metadata = self._interceptor.pre_delete(request, metadata) + pb_request = compute.DeleteStoragePoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete(resp) + return resp + + class _Get(StoragePoolsRestStub): + def __hash__(self): + return hash("Get") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.GetStoragePoolRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.StoragePool: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetStoragePoolRequest): + The request object. A request message for + StoragePools.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.StoragePool: + Represents a zonal storage pool + resource. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/storagePools/{storage_pool}", + }, + ] + request, metadata = self._interceptor.pre_get(request, metadata) + pb_request = compute.GetStoragePoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.StoragePool() + pb_resp = compute.StoragePool.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get(resp) + return resp + + class _GetIamPolicy(StoragePoolsRestStub): + def __hash__(self): + return hash("GetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.GetIamPolicyStoragePoolRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.compute.GetIamPolicyStoragePoolRequest): + The request object. A request message for + StoragePools.GetIamPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/storagePools/{resource}/getIamPolicy", + }, + ] + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + pb_request = compute.GetIamPolicyStoragePoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + class _Insert(StoragePoolsRestStub): + def __hash__(self): + return hash("Insert") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.InsertStoragePoolRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertStoragePoolRequest): + The request object. A request message for + StoragePools.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zoneOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ Note that completed Operation resources have a limited + retention period. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/storagePools", + "body": "storage_pool_resource", + }, + ] + request, metadata = self._interceptor.pre_insert(request, metadata) + pb_request = compute.InsertStoragePoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_insert(resp) + return resp + + class _List(StoragePoolsRestStub): + def __hash__(self): + return hash("List") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.ListStoragePoolsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.StoragePoolList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListStoragePoolsRequest): + The request object. A request message for + StoragePools.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.StoragePoolList: + A list of StoragePool resources. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/storagePools", + }, + ] + request, metadata = self._interceptor.pre_list(request, metadata) + pb_request = compute.ListStoragePoolsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.StoragePoolList() + pb_resp = compute.StoragePoolList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list(resp) + return resp + + class _ListDisks(StoragePoolsRestStub): + def __hash__(self): + return hash("ListDisks") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.ListDisksStoragePoolsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.StoragePoolListDisks: + r"""Call the list disks method over HTTP. + + Args: + request (~.compute.ListDisksStoragePoolsRequest): + The request object. A request message for + StoragePools.ListDisks. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.StoragePoolListDisks: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/storagePools/{storage_pool}/listDisks", + }, + ] + request, metadata = self._interceptor.pre_list_disks(request, metadata) + pb_request = compute.ListDisksStoragePoolsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.StoragePoolListDisks() + pb_resp = compute.StoragePoolListDisks.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_disks(resp) + return resp + + class _SetIamPolicy(StoragePoolsRestStub): + def __hash__(self): + return hash("SetIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.SetIamPolicyStoragePoolRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicyStoragePoolRequest): + The request object. A request message for + StoragePools.SetIamPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. A + ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. 
For some types of Google + Cloud resources, a ``binding`` can also specify a + ``condition``, which is a logical expression that allows + access to a resource only if the expression evaluates to + ``true``. A condition can add constraints based on + attributes of the request, the resource, or both. To + learn which resources support conditions in their IAM + policies, see the `IAM + documentation `__. + **JSON example:** + ``{ "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`` + **YAML example:** + ``bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`` + For a description of IAM and its features, see the `IAM + documentation `__. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/storagePools/{resource}/setIamPolicy", + "body": "zone_set_policy_request_resource", + }, + ] + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + pb_request = compute.SetIamPolicyStoragePoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + class _TestIamPermissions(StoragePoolsRestStub): + def __hash__(self): + return hash("TestIamPermissions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.TestIamPermissionsStoragePoolRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsStoragePoolRequest): + The request object. A request message for + StoragePools.TestIamPermissions. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/storagePools/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + pb_request = compute.TestIamPermissionsStoragePoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + class _Update(StoragePoolsRestStub): + def __hash__(self): + return hash("Update") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: compute.UpdateStoragePoolRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> compute.Operation: + r"""Call the update method over HTTP. + + Args: + request (~.compute.UpdateStoragePoolRequest): + The request object. A request message for + StoragePools.Update. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.compute.Operation: + Represents an Operation resource. Google Compute Engine + has three Operation resources: \* + `Global `__ + \* + `Regional `__ + \* + `Zonal `__ + You can use an operation resource to manage asynchronous + API requests. For more information, read Handling API + responses. Operations can be global, regional or zonal. + - For global operations, use the ``globalOperations`` + resource. - For regional operations, use the + ``regionOperations`` resource. - For zonal operations, + use the ``zoneOperations`` resource. For more + information, read Global, Regional, and Zonal Resources. 
+ Note that completed Operation resources have a limited + retention period. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/zones/{zone}/storagePools/{storage_pool}", + "body": "storage_pool_resource", + }, + ] + request, metadata = self._interceptor.pre_update(request, metadata) + pb_request = compute.UpdateStoragePoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update(resp) + return resp + + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListStoragePoolsRequest], compute.StoragePoolAggregatedList + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete(self) -> Callable[[compute.DeleteStoragePoolRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[[compute.GetStoragePoolRequest], compute.StoragePool]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy( + self, + ) -> Callable[[compute.GetIamPolicyStoragePoolRequest], compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert(self) -> Callable[[compute.InsertStoragePoolRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list( + self, + ) -> Callable[[compute.ListStoragePoolsRequest], compute.StoragePoolList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_disks( + self, + ) -> Callable[[compute.ListDisksStoragePoolsRequest], compute.StoragePoolListDisks]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDisks(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy( + self, + ) -> Callable[[compute.SetIamPolicyStoragePoolRequest], compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsStoragePoolRequest], compute.TestPermissionsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def update(self) -> Callable[[compute.UpdateStoragePoolRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Update(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("StoragePoolsRestTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py index 9c3b9a6303ea..5b353c31cb86 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/subnetworks/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, SubnetworksTransport]] = None, + transport: Optional[ + Union[str, SubnetworksTransport, Callable[..., SubnetworksTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, SubnetworksTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,SubnetworksTransport,Callable[..., SubnetworksTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the SubnetworksTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). 
We welcome your feedback via an issue in this library's source repository. @@ -627,8 +632,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[SubnetworksTransport], Callable[..., SubnetworksTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., SubnetworksTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -704,8 +716,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -713,10 +725,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListSubnetworksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListSubnetworksRequest): request = compute.AggregatedListSubnetworksRequest(request) # If we have keyword arguments corresponding to fields on the @@ -835,8 +845,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, subnetwork]) if request is not None and has_flattened_params: raise ValueError( @@ -844,10 +854,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteSubnetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteSubnetworkRequest): request = compute.DeleteSubnetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -967,8 +975,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, subnetwork]) if request is not None and has_flattened_params: raise ValueError( @@ -976,10 +984,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteSubnetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteSubnetworkRequest): request = compute.DeleteSubnetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1135,8 +1141,8 @@ def sample_expand_ip_cidr_range(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1151,10 +1157,8 @@ def sample_expand_ip_cidr_range(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ExpandIpCidrRangeSubnetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ExpandIpCidrRangeSubnetworkRequest): request = compute.ExpandIpCidrRangeSubnetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1289,8 +1293,8 @@ def sample_expand_ip_cidr_range(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1305,10 +1309,8 @@ def sample_expand_ip_cidr_range(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ExpandIpCidrRangeSubnetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ExpandIpCidrRangeSubnetworkRequest): request = compute.ExpandIpCidrRangeSubnetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1462,8 +1464,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, subnetwork]) if request is not None and has_flattened_params: raise ValueError( @@ -1471,10 +1473,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetSubnetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetSubnetworkRequest): request = compute.GetSubnetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1615,8 +1615,8 @@ def sample_get_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1624,10 +1624,8 @@ def sample_get_iam_policy(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetIamPolicySubnetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetIamPolicySubnetworkRequest): request = compute.GetIamPolicySubnetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1745,8 +1743,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, subnetwork_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1754,10 +1752,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertSubnetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertSubnetworkRequest): request = compute.InsertSubnetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1874,8 +1870,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, subnetwork_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1883,10 +1879,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertSubnetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertSubnetworkRequest): request = compute.InsertSubnetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2026,8 +2020,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -2035,10 +2029,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListSubnetworksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListSubnetworksRequest): request = compute.ListSubnetworksRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2149,8 +2141,8 @@ def sample_list_usable(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -2158,10 +2150,8 @@ def sample_list_usable(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListUsableSubnetworksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListUsableSubnetworksRequest): request = compute.ListUsableSubnetworksRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2290,8 +2280,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, subnetwork, subnetwork_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2299,10 +2289,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchSubnetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.PatchSubnetworkRequest): request = compute.PatchSubnetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2434,8 +2422,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, subnetwork, subnetwork_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2443,10 +2431,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchSubnetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchSubnetworkRequest): request = compute.PatchSubnetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2622,8 +2608,8 @@ def sample_set_iam_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, region_set_policy_request_resource] ) @@ -2633,10 +2619,8 @@ def sample_set_iam_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetIamPolicySubnetworkRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetIamPolicySubnetworkRequest): request = compute.SetIamPolicySubnetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2770,8 +2754,8 @@ def sample_set_private_ip_google_access(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2786,10 +2770,8 @@ def sample_set_private_ip_google_access(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetPrivateIpGoogleAccessSubnetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetPrivateIpGoogleAccessSubnetworkRequest): request = compute.SetPrivateIpGoogleAccessSubnetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2925,8 +2907,8 @@ def sample_set_private_ip_google_access(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [ project, @@ -2941,10 +2923,8 @@ def sample_set_private_ip_google_access(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetPrivateIpGoogleAccessSubnetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetPrivateIpGoogleAccessSubnetworkRequest): request = compute.SetPrivateIpGoogleAccessSubnetworkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3104,8 +3084,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, test_permissions_request_resource] ) @@ -3115,10 +3095,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsSubnetworkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.TestIamPermissionsSubnetworkRequest): request = compute.TestIamPermissionsSubnetworkRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/client.py index 331e7797565b..160f62c54058 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_grpc_proxies/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, TargetGrpcProxiesTransport]] = None, + transport: Optional[ + Union[ + str, + TargetGrpcProxiesTransport, + Callable[..., TargetGrpcProxiesTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +525,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, TargetGrpcProxiesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,TargetGrpcProxiesTransport,Callable[..., TargetGrpcProxiesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the TargetGrpcProxiesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). 
We welcome your feedback via an issue in this library's source repository. @@ -632,8 +641,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[TargetGrpcProxiesTransport], + Callable[..., TargetGrpcProxiesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., TargetGrpcProxiesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -715,8 +732,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_grpc_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -724,10 +741,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteTargetGrpcProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteTargetGrpcProxyRequest): request = compute.DeleteTargetGrpcProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -836,8 +851,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_grpc_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -845,10 +860,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteTargetGrpcProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteTargetGrpcProxyRequest): request = compute.DeleteTargetGrpcProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -986,8 +999,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_grpc_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -995,10 +1008,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetTargetGrpcProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.GetTargetGrpcProxyRequest): request = compute.GetTargetGrpcProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1105,8 +1116,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_grpc_proxy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1114,10 +1125,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertTargetGrpcProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertTargetGrpcProxyRequest): request = compute.InsertTargetGrpcProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1219,8 +1228,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_grpc_proxy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1228,10 +1237,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertTargetGrpcProxyRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertTargetGrpcProxyRequest): request = compute.InsertTargetGrpcProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1352,8 +1359,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1361,10 +1368,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListTargetGrpcProxiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListTargetGrpcProxiesRequest): request = compute.ListTargetGrpcProxiesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1483,8 +1488,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, target_grpc_proxy, target_grpc_proxy_resource] ) @@ -1494,10 +1499,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchTargetGrpcProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchTargetGrpcProxyRequest): request = compute.PatchTargetGrpcProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1616,8 +1619,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, target_grpc_proxy, target_grpc_proxy_resource] ) @@ -1627,10 +1630,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchTargetGrpcProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.PatchTargetGrpcProxyRequest): request = compute.PatchTargetGrpcProxyRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py index df6fa65b1076..44fd28387395 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_http_proxies/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, TargetHttpProxiesTransport]] = None, + transport: Optional[ + Union[ + str, + TargetHttpProxiesTransport, + Callable[..., TargetHttpProxiesTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +525,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, TargetHttpProxiesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,TargetHttpProxiesTransport,Callable[..., TargetHttpProxiesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the TargetHttpProxiesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). 
We welcome your feedback via an issue in this library's source repository. @@ -632,8 +641,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[TargetHttpProxiesTransport], + Callable[..., TargetHttpProxiesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., TargetHttpProxiesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -714,8 +731,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -723,10 +740,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListTargetHttpProxiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListTargetHttpProxiesRequest): request = compute.AggregatedListTargetHttpProxiesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -836,8 +851,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_http_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -845,10 +860,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteTargetHttpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteTargetHttpProxyRequest): request = compute.DeleteTargetHttpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -956,8 +969,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_http_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -965,10 +978,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteTargetHttpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteTargetHttpProxyRequest): request = compute.DeleteTargetHttpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1114,8 +1125,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_http_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -1123,10 +1134,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetTargetHttpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetTargetHttpProxyRequest): request = compute.GetTargetHttpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1232,8 +1241,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_http_proxy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1241,10 +1250,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertTargetHttpProxyRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertTargetHttpProxyRequest): request = compute.InsertTargetHttpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1345,8 +1352,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_http_proxy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1354,10 +1361,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertTargetHttpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertTargetHttpProxyRequest): request = compute.InsertTargetHttpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1480,8 +1485,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1489,10 +1494,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListTargetHttpProxiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListTargetHttpProxiesRequest): request = compute.ListTargetHttpProxiesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1611,8 +1614,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, target_http_proxy, target_http_proxy_resource] ) @@ -1622,10 +1625,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchTargetHttpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchTargetHttpProxyRequest): request = compute.PatchTargetHttpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1744,8 +1745,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, target_http_proxy, target_http_proxy_resource] ) @@ -1755,10 +1756,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchTargetHttpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchTargetHttpProxyRequest): request = compute.PatchTargetHttpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1898,8 +1897,8 @@ def sample_set_url_map(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, target_http_proxy, url_map_reference_resource] ) @@ -1909,10 +1908,8 @@ def sample_set_url_map(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetUrlMapTargetHttpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetUrlMapTargetHttpProxyRequest): request = compute.SetUrlMapTargetHttpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2028,8 +2025,8 @@ def sample_set_url_map(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, target_http_proxy, url_map_reference_resource] ) @@ -2039,10 +2036,8 @@ def sample_set_url_map(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetUrlMapTargetHttpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetUrlMapTargetHttpProxyRequest): request = compute.SetUrlMapTargetHttpProxyRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py index fc67961f38cd..37bfb2ac8e85 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_https_proxies/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, TargetHttpsProxiesTransport]] = None, + transport: Optional[ + Union[ + str, + TargetHttpsProxiesTransport, + Callable[..., TargetHttpsProxiesTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +525,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, TargetHttpsProxiesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,TargetHttpsProxiesTransport,Callable[..., TargetHttpsProxiesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the TargetHttpsProxiesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). 
We welcome your feedback via an issue in this library's source repository. @@ -632,8 +641,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[TargetHttpsProxiesTransport], + Callable[..., TargetHttpsProxiesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., TargetHttpsProxiesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -714,8 +731,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -723,10 +740,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListTargetHttpsProxiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListTargetHttpsProxiesRequest): request = compute.AggregatedListTargetHttpsProxiesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -836,8 +851,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_https_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -845,10 +860,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteTargetHttpsProxyRequest): request = compute.DeleteTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -956,8 +969,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_https_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -965,10 +978,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteTargetHttpsProxyRequest): request = compute.DeleteTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1114,8 +1125,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_https_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -1123,10 +1134,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetTargetHttpsProxyRequest): request = compute.GetTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1232,8 +1241,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_https_proxy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1241,10 +1250,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertTargetHttpsProxyRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertTargetHttpsProxyRequest): request = compute.InsertTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1345,8 +1352,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_https_proxy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1354,10 +1361,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertTargetHttpsProxyRequest): request = compute.InsertTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1480,8 +1485,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1489,10 +1494,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListTargetHttpsProxiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListTargetHttpsProxiesRequest): request = compute.ListTargetHttpsProxiesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1611,8 +1614,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, target_https_proxy, target_https_proxy_resource] ) @@ -1622,10 +1625,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchTargetHttpsProxyRequest): request = compute.PatchTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1744,8 +1745,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, target_https_proxy, target_https_proxy_resource] ) @@ -1755,10 +1756,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchTargetHttpsProxyRequest): request = compute.PatchTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1904,8 +1903,8 @@ def sample_set_certificate_map(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1919,10 +1918,8 @@ def sample_set_certificate_map(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetCertificateMapTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetCertificateMapTargetHttpsProxyRequest): request = compute.SetCertificateMapTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2046,8 +2043,8 @@ def sample_set_certificate_map(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2061,10 +2058,8 @@ def sample_set_certificate_map(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetCertificateMapTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetCertificateMapTargetHttpsProxyRequest): request = compute.SetCertificateMapTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2211,8 +2206,8 @@ def sample_set_quic_override(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2226,10 +2221,8 @@ def sample_set_quic_override(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetQuicOverrideTargetHttpsProxyRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetQuicOverrideTargetHttpsProxyRequest): request = compute.SetQuicOverrideTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2352,8 +2345,8 @@ def sample_set_quic_override(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2367,10 +2360,8 @@ def sample_set_quic_override(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetQuicOverrideTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetQuicOverrideTargetHttpsProxyRequest): request = compute.SetQuicOverrideTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2516,8 +2507,8 @@ def sample_set_ssl_certificates(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [ project, @@ -2531,10 +2522,8 @@ def sample_set_ssl_certificates(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetSslCertificatesTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetSslCertificatesTargetHttpsProxyRequest): request = compute.SetSslCertificatesTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2656,8 +2645,8 @@ def sample_set_ssl_certificates(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2671,10 +2660,8 @@ def sample_set_ssl_certificates(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetSslCertificatesTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetSslCertificatesTargetHttpsProxyRequest): request = compute.SetSslCertificatesTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2824,8 +2811,8 @@ def sample_set_ssl_policy(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, target_https_proxy, ssl_policy_reference_resource] ) @@ -2835,10 +2822,8 @@ def sample_set_ssl_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetSslPolicyTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetSslPolicyTargetHttpsProxyRequest): request = compute.SetSslPolicyTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2962,8 +2947,8 @@ def sample_set_ssl_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, target_https_proxy, ssl_policy_reference_resource] ) @@ -2973,10 +2958,8 @@ def sample_set_ssl_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetSslPolicyTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetSslPolicyTargetHttpsProxyRequest): request = compute.SetSslPolicyTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3116,8 +3099,8 @@ def sample_set_url_map(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, target_https_proxy, url_map_reference_resource] ) @@ -3127,10 +3110,8 @@ def sample_set_url_map(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetUrlMapTargetHttpsProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetUrlMapTargetHttpsProxyRequest): request = compute.SetUrlMapTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3246,8 +3227,8 @@ def sample_set_url_map(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, target_https_proxy, url_map_reference_resource] ) @@ -3257,10 +3238,8 @@ def sample_set_url_map(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetUrlMapTargetHttpsProxyRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetUrlMapTargetHttpsProxyRequest): request = compute.SetUrlMapTargetHttpsProxyRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py index 47641e0f6690..16da0f336e4c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_instances/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, TargetInstancesTransport]] = None, + transport: Optional[ + Union[ + str, TargetInstancesTransport, Callable[..., TargetInstancesTransport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +523,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, TargetInstancesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,TargetInstancesTransport,Callable[..., TargetInstancesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. 
+ If a Callable is given, it will be called with the same set of initialization + arguments as used in the TargetInstancesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -632,8 +639,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[TargetInstancesTransport], Callable[..., TargetInstancesTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., TargetInstancesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -711,8 +725,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -720,10 +734,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListTargetInstancesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.AggregatedListTargetInstancesRequest): request = compute.AggregatedListTargetInstancesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -842,8 +854,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, target_instance]) if request is not None and has_flattened_params: raise ValueError( @@ -851,10 +863,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteTargetInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteTargetInstanceRequest): request = compute.DeleteTargetInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -974,8 +984,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, target_instance]) if request is not None and has_flattened_params: raise ValueError( @@ -983,10 +993,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteTargetInstanceRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteTargetInstanceRequest): request = compute.DeleteTargetInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1137,8 +1145,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, target_instance]) if request is not None and has_flattened_params: raise ValueError( @@ -1146,10 +1154,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetTargetInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetTargetInstanceRequest): request = compute.GetTargetInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1267,8 +1273,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, zone, target_instance_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1276,10 +1282,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertTargetInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertTargetInstanceRequest): request = compute.InsertTargetInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1396,8 +1400,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, target_instance_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1405,10 +1409,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertTargetInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertTargetInstanceRequest): request = compute.InsertTargetInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1548,8 +1550,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone]) if request is not None and has_flattened_params: raise ValueError( @@ -1557,10 +1559,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListTargetInstancesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListTargetInstancesRequest): request = compute.ListTargetInstancesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1699,8 +1699,8 @@ def sample_set_security_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, target_instance, security_policy_reference_resource] ) @@ -1710,10 +1710,8 @@ def sample_set_security_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetSecurityPolicyTargetInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetSecurityPolicyTargetInstanceRequest): request = compute.SetSecurityPolicyTargetInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1850,8 +1848,8 @@ def sample_set_security_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, zone, target_instance, security_policy_reference_resource] ) @@ -1861,10 +1859,8 @@ def sample_set_security_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetSecurityPolicyTargetInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetSecurityPolicyTargetInstanceRequest): request = compute.SetSecurityPolicyTargetInstanceRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py index 779e2164a81e..fe4b4a2380d3 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_pools/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, TargetPoolsTransport]] = None, + transport: Optional[ + Union[str, TargetPoolsTransport, Callable[..., TargetPoolsTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, TargetPoolsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,TargetPoolsTransport,Callable[..., TargetPoolsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the TargetPoolsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. 
@@ -627,8 +632,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[TargetPoolsTransport], Callable[..., TargetPoolsTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., TargetPoolsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -726,8 +738,8 @@ def sample_add_health_check(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -742,10 +754,8 @@ def sample_add_health_check(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddHealthCheckTargetPoolRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddHealthCheckTargetPoolRequest): request = compute.AddHealthCheckTargetPoolRequest(request) # If we have keyword arguments corresponding to fields on the @@ -877,8 +887,8 @@ def sample_add_health_check(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -893,10 +903,8 @@ def sample_add_health_check(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddHealthCheckTargetPoolRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddHealthCheckTargetPoolRequest): request = compute.AddHealthCheckTargetPoolRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1053,8 +1061,8 @@ def sample_add_instance(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, target_pool, target_pools_add_instance_request_resource] ) @@ -1064,10 +1072,8 @@ def sample_add_instance(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddInstanceTargetPoolRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddInstanceTargetPoolRequest): request = compute.AddInstanceTargetPoolRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1199,8 +1205,8 @@ def sample_add_instance(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, target_pool, target_pools_add_instance_request_resource] ) @@ -1210,10 +1216,8 @@ def sample_add_instance(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AddInstanceTargetPoolRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AddInstanceTargetPoolRequest): request = compute.AddInstanceTargetPoolRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1348,8 +1352,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1357,10 +1361,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListTargetPoolsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.AggregatedListTargetPoolsRequest): request = compute.AggregatedListTargetPoolsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1479,8 +1481,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, target_pool]) if request is not None and has_flattened_params: raise ValueError( @@ -1488,10 +1490,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteTargetPoolRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteTargetPoolRequest): request = compute.DeleteTargetPoolRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1611,8 +1611,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, target_pool]) if request is not None and has_flattened_params: raise ValueError( @@ -1620,10 +1620,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteTargetPoolRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteTargetPoolRequest): request = compute.DeleteTargetPoolRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1774,8 +1772,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, target_pool]) if request is not None and has_flattened_params: raise ValueError( @@ -1783,10 +1781,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetTargetPoolRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetTargetPoolRequest): request = compute.GetTargetPoolRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1912,8 +1908,8 @@ def sample_get_health(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, region, target_pool, instance_reference_resource] ) @@ -1923,10 +1919,8 @@ def sample_get_health(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetHealthTargetPoolRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetHealthTargetPoolRequest): request = compute.GetHealthTargetPoolRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2046,8 +2040,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, target_pool_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2055,10 +2049,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertTargetPoolRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertTargetPoolRequest): request = compute.InsertTargetPoolRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2175,8 +2167,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, target_pool_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2184,10 +2176,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertTargetPoolRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertTargetPoolRequest): request = compute.InsertTargetPoolRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2327,8 +2317,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -2336,10 +2326,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListTargetPoolsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ListTargetPoolsRequest): request = compute.ListTargetPoolsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2473,8 +2461,8 @@ def sample_remove_health_check(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2489,10 +2477,8 @@ def sample_remove_health_check(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveHealthCheckTargetPoolRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.RemoveHealthCheckTargetPoolRequest): request = compute.RemoveHealthCheckTargetPoolRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2624,8 +2610,8 @@ def sample_remove_health_check(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2640,10 +2626,8 @@ def sample_remove_health_check(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveHealthCheckTargetPoolRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.RemoveHealthCheckTargetPoolRequest): request = compute.RemoveHealthCheckTargetPoolRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2800,8 +2784,8 @@ def sample_remove_instance(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2816,10 +2800,8 @@ def sample_remove_instance(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveInstanceTargetPoolRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.RemoveInstanceTargetPoolRequest): request = compute.RemoveInstanceTargetPoolRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2951,8 +2933,8 @@ def sample_remove_instance(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2967,10 +2949,8 @@ def sample_remove_instance(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.RemoveInstanceTargetPoolRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.RemoveInstanceTargetPoolRequest): request = compute.RemoveInstanceTargetPoolRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3125,8 +3105,8 @@ def sample_set_backup(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, target_pool, target_reference_resource] ) @@ -3136,10 +3116,8 @@ def sample_set_backup(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetBackupTargetPoolRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetBackupTargetPoolRequest): request = compute.SetBackupTargetPoolRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3267,8 +3245,8 @@ def sample_set_backup(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [project, region, target_pool, target_reference_resource] ) @@ -3278,10 +3256,8 @@ def sample_set_backup(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetBackupTargetPoolRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetBackupTargetPoolRequest): request = compute.SetBackupTargetPoolRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3441,8 +3417,8 @@ def sample_set_security_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, target_pool, security_policy_reference_resource] ) @@ -3452,10 +3428,8 @@ def sample_set_security_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetSecurityPolicyTargetPoolRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetSecurityPolicyTargetPoolRequest): request = compute.SetSecurityPolicyTargetPoolRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3592,8 +3566,8 @@ def sample_set_security_policy(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, target_pool, security_policy_reference_resource] ) @@ -3603,10 +3577,8 @@ def sample_set_security_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetSecurityPolicyTargetPoolRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetSecurityPolicyTargetPoolRequest): request = compute.SetSecurityPolicyTargetPoolRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/client.py index 6fc0e6e6a8af..7e68158d25e9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_ssl_proxies/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, TargetSslProxiesTransport]] = None, + transport: Optional[ + Union[ + str, TargetSslProxiesTransport, Callable[..., TargetSslProxiesTransport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ 
-518,9 +523,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, TargetSslProxiesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,TargetSslProxiesTransport,Callable[..., TargetSslProxiesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the TargetSslProxiesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -632,8 +639,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[TargetSslProxiesTransport], + Callable[..., TargetSslProxiesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., TargetSslProxiesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -714,8 +729,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, target_ssl_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -723,10 +738,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteTargetSslProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteTargetSslProxyRequest): request = compute.DeleteTargetSslProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -834,8 +847,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_ssl_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -843,10 +856,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteTargetSslProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteTargetSslProxyRequest): request = compute.DeleteTargetSslProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -984,8 +995,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_ssl_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -993,10 +1004,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetTargetSslProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetTargetSslProxyRequest): request = compute.GetTargetSslProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1102,8 +1111,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_ssl_proxy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1111,10 +1120,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertTargetSslProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.InsertTargetSslProxyRequest): request = compute.InsertTargetSslProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1215,8 +1222,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_ssl_proxy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1224,10 +1231,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertTargetSslProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertTargetSslProxyRequest): request = compute.InsertTargetSslProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1350,8 +1355,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1359,10 +1364,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListTargetSslProxiesRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListTargetSslProxiesRequest): request = compute.ListTargetSslProxiesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1483,8 +1486,8 @@ def sample_set_backend_service(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1498,10 +1501,8 @@ def sample_set_backend_service(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetBackendServiceTargetSslProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetBackendServiceTargetSslProxyRequest): request = compute.SetBackendServiceTargetSslProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1624,8 +1625,8 @@ def sample_set_backend_service(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1639,10 +1640,8 @@ def sample_set_backend_service(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetBackendServiceTargetSslProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetBackendServiceTargetSslProxyRequest): request = compute.SetBackendServiceTargetSslProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1790,8 +1789,8 @@ def sample_set_certificate_map(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1805,10 +1804,8 @@ def sample_set_certificate_map(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetCertificateMapTargetSslProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetCertificateMapTargetSslProxyRequest): request = compute.SetCertificateMapTargetSslProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1932,8 +1929,8 @@ def sample_set_certificate_map(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any( [ project, @@ -1947,10 +1944,8 @@ def sample_set_certificate_map(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetCertificateMapTargetSslProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetCertificateMapTargetSslProxyRequest): request = compute.SetCertificateMapTargetSslProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2096,8 +2091,8 @@ def sample_set_proxy_header(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2111,10 +2106,8 @@ def sample_set_proxy_header(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetProxyHeaderTargetSslProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetProxyHeaderTargetSslProxyRequest): request = compute.SetProxyHeaderTargetSslProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2236,8 +2229,8 @@ def sample_set_proxy_header(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2251,10 +2244,8 @@ def sample_set_proxy_header(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetProxyHeaderTargetSslProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetProxyHeaderTargetSslProxyRequest): request = compute.SetProxyHeaderTargetSslProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2401,8 +2392,8 @@ def sample_set_ssl_certificates(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2416,10 +2407,8 @@ def sample_set_ssl_certificates(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetSslCertificatesTargetSslProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetSslCertificatesTargetSslProxyRequest): request = compute.SetSslCertificatesTargetSslProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2542,8 +2531,8 @@ def sample_set_ssl_certificates(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2557,10 +2546,8 @@ def sample_set_ssl_certificates(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetSslCertificatesTargetSslProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetSslCertificatesTargetSslProxyRequest): request = compute.SetSslCertificatesTargetSslProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2710,8 +2697,8 @@ def sample_set_ssl_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, target_ssl_proxy, ssl_policy_reference_resource] ) @@ -2721,10 +2708,8 @@ def sample_set_ssl_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetSslPolicyTargetSslProxyRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetSslPolicyTargetSslProxyRequest): request = compute.SetSslPolicyTargetSslProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2848,8 +2833,8 @@ def sample_set_ssl_policy(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, target_ssl_proxy, ssl_policy_reference_resource] ) @@ -2859,10 +2844,8 @@ def sample_set_ssl_policy(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetSslPolicyTargetSslProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetSslPolicyTargetSslProxyRequest): request = compute.SetSslPolicyTargetSslProxyRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/client.py index 7d5b6a0de995..f6d6d1ca24dd 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_tcp_proxies/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, TargetTcpProxiesTransport]] = None, + transport: Optional[ + Union[ + str, TargetTcpProxiesTransport, Callable[..., TargetTcpProxiesTransport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +523,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, TargetTcpProxiesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,TargetTcpProxiesTransport,Callable[..., TargetTcpProxiesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the TargetTcpProxiesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). 
We welcome your feedback via an issue in this library's source repository. @@ -632,8 +639,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[TargetTcpProxiesTransport], + Callable[..., TargetTcpProxiesTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., TargetTcpProxiesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -714,8 +729,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -723,10 +738,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListTargetTcpProxiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListTargetTcpProxiesRequest): request = compute.AggregatedListTargetTcpProxiesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -836,8 +849,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_tcp_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -845,10 +858,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteTargetTcpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteTargetTcpProxyRequest): request = compute.DeleteTargetTcpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -956,8 +967,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_tcp_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -965,10 +976,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteTargetTcpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteTargetTcpProxyRequest): request = compute.DeleteTargetTcpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1106,8 +1115,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_tcp_proxy]) if request is not None and has_flattened_params: raise ValueError( @@ -1115,10 +1124,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetTargetTcpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetTargetTcpProxyRequest): request = compute.GetTargetTcpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1224,8 +1231,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_tcp_proxy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1233,10 +1240,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertTargetTcpProxyRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertTargetTcpProxyRequest): request = compute.InsertTargetTcpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1337,8 +1342,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, target_tcp_proxy_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1346,10 +1351,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertTargetTcpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertTargetTcpProxyRequest): request = compute.InsertTargetTcpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1472,8 +1475,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1481,10 +1484,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListTargetTcpProxiesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListTargetTcpProxiesRequest): request = compute.ListTargetTcpProxiesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1605,8 +1606,8 @@ def sample_set_backend_service(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1620,10 +1621,8 @@ def sample_set_backend_service(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetBackendServiceTargetTcpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetBackendServiceTargetTcpProxyRequest): request = compute.SetBackendServiceTargetTcpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1746,8 +1745,8 @@ def sample_set_backend_service(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1761,10 +1760,8 @@ def sample_set_backend_service(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetBackendServiceTargetTcpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetBackendServiceTargetTcpProxyRequest): request = compute.SetBackendServiceTargetTcpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1910,8 +1907,8 @@ def sample_set_proxy_header(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -1925,10 +1922,8 @@ def sample_set_proxy_header(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetProxyHeaderTargetTcpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetProxyHeaderTargetTcpProxyRequest): request = compute.SetProxyHeaderTargetTcpProxyRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2050,8 +2045,8 @@ def sample_set_proxy_header(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [ project, @@ -2065,10 +2060,8 @@ def sample_set_proxy_header(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetProxyHeaderTargetTcpProxyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetProxyHeaderTargetTcpProxyRequest): request = compute.SetProxyHeaderTargetTcpProxyRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/client.py index d9a0a057514c..41ba988d1a0d 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/target_vpn_gateways/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -506,7 +507,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, TargetVpnGatewaysTransport]] = None, + transport: Optional[ + Union[ + str, + TargetVpnGatewaysTransport, + Callable[..., TargetVpnGatewaysTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -518,9 +525,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, TargetVpnGatewaysTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,TargetVpnGatewaysTransport,Callable[..., TargetVpnGatewaysTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the TargetVpnGatewaysTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). 
We welcome your feedback via an issue in this library's source repository. @@ -632,8 +641,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[TargetVpnGatewaysTransport], + Callable[..., TargetVpnGatewaysTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., TargetVpnGatewaysTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -711,8 +728,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -720,10 +737,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListTargetVpnGatewaysRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListTargetVpnGatewaysRequest): request = compute.AggregatedListTargetVpnGatewaysRequest(request) # If we have keyword arguments corresponding to fields on the @@ -840,8 +855,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, target_vpn_gateway]) if request is not None and has_flattened_params: raise ValueError( @@ -849,10 +864,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteTargetVpnGatewayRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteTargetVpnGatewayRequest): request = compute.DeleteTargetVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the @@ -970,8 +983,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, target_vpn_gateway]) if request is not None and has_flattened_params: raise ValueError( @@ -979,10 +992,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteTargetVpnGatewayRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteTargetVpnGatewayRequest): request = compute.DeleteTargetVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1128,8 +1139,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, target_vpn_gateway]) if request is not None and has_flattened_params: raise ValueError( @@ -1137,10 +1148,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetTargetVpnGatewayRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetTargetVpnGatewayRequest): request = compute.GetTargetVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1256,8 +1265,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, target_vpn_gateway_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1265,10 +1274,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertTargetVpnGatewayRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertTargetVpnGatewayRequest): request = compute.InsertTargetVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1383,8 +1390,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, target_vpn_gateway_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1392,10 +1399,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertTargetVpnGatewayRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertTargetVpnGatewayRequest): request = compute.InsertTargetVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1533,8 +1538,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1542,10 +1547,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListTargetVpnGatewaysRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListTargetVpnGatewaysRequest): request = compute.ListTargetVpnGatewaysRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1678,8 +1681,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, region_set_labels_request_resource] ) @@ -1689,10 +1692,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsTargetVpnGatewayRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsTargetVpnGatewayRequest): request = compute.SetLabelsTargetVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1823,8 +1824,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, region_set_labels_request_resource] ) @@ -1834,10 +1835,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsTargetVpnGatewayRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsTargetVpnGatewayRequest): request = compute.SetLabelsTargetVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py index 26e33e939486..ccc37363db0b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/url_maps/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, UrlMapsTransport]] = None, + transport: Optional[ + Union[str, UrlMapsTransport, Callable[..., UrlMapsTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; if 
none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, UrlMapsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,UrlMapsTransport,Callable[..., UrlMapsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the UrlMapsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -627,8 +632,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[UrlMapsTransport], Callable[..., UrlMapsTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., UrlMapsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -707,8 +719,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -716,10 +728,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListUrlMapsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListUrlMapsRequest): request = compute.AggregatedListUrlMapsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -828,8 +838,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, url_map]) if request is not None and has_flattened_params: raise ValueError( @@ -837,10 +847,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteUrlMapRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteUrlMapRequest): request = compute.DeleteUrlMapRequest(request) # If we have keyword arguments corresponding to fields on the @@ -947,8 +955,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, url_map]) if request is not None and has_flattened_params: raise ValueError( @@ -956,10 +964,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteUrlMapRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteUrlMapRequest): request = compute.DeleteUrlMapRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1113,8 +1119,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, url_map]) if request is not None and has_flattened_params: raise ValueError( @@ -1122,10 +1128,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetUrlMapRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetUrlMapRequest): request = compute.GetUrlMapRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1230,8 +1234,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, url_map_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1239,10 +1243,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertUrlMapRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertUrlMapRequest): request = compute.InsertUrlMapRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1342,8 +1344,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, url_map_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1351,10 +1353,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertUrlMapRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.InsertUrlMapRequest): request = compute.InsertUrlMapRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1492,8 +1492,8 @@ def sample_invalidate_cache(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, url_map, cache_invalidation_rule_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1501,10 +1501,8 @@ def sample_invalidate_cache(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InvalidateCacheUrlMapRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InvalidateCacheUrlMapRequest): request = compute.InvalidateCacheUrlMapRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1627,8 +1625,8 @@ def sample_invalidate_cache(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, url_map, cache_invalidation_rule_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1636,10 +1634,8 @@ def sample_invalidate_cache(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InvalidateCacheUrlMapRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InvalidateCacheUrlMapRequest): request = compute.InvalidateCacheUrlMapRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1770,8 +1766,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1779,10 +1775,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListUrlMapsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListUrlMapsRequest): request = compute.ListUrlMapsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1898,8 +1892,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, url_map, url_map_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1907,10 +1901,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchUrlMapRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchUrlMapRequest): request = compute.PatchUrlMapRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2026,8 +2018,8 @@ def sample_patch(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, url_map, url_map_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2035,10 +2027,8 @@ def sample_patch(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.PatchUrlMapRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.PatchUrlMapRequest): request = compute.PatchUrlMapRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2178,8 +2168,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, url_map, url_map_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2187,10 +2177,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateUrlMapRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.UpdateUrlMapRequest): request = compute.UpdateUrlMapRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2306,8 +2294,8 @@ def sample_update(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, url_map, url_map_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -2315,10 +2303,8 @@ def sample_update(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.UpdateUrlMapRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.UpdateUrlMapRequest): request = compute.UpdateUrlMapRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2460,8 +2446,8 @@ def sample_validate(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, url_map, url_maps_validate_request_resource] ) @@ -2471,10 +2457,8 @@ def sample_validate(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ValidateUrlMapRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.ValidateUrlMapRequest): request = compute.ValidateUrlMapRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/client.py index ce917fdae5f2..4f3bfa7b797b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_gateways/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, VpnGatewaysTransport]] = None, + transport: Optional[ + Union[str, VpnGatewaysTransport, Callable[..., VpnGatewaysTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, VpnGatewaysTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,VpnGatewaysTransport,Callable[..., VpnGatewaysTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the VpnGatewaysTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. 
@@ -627,8 +632,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[VpnGatewaysTransport], Callable[..., VpnGatewaysTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., VpnGatewaysTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -704,8 +716,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -713,10 +725,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListVpnGatewaysRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListVpnGatewaysRequest): request = compute.AggregatedListVpnGatewaysRequest(request) # If we have keyword arguments corresponding to fields on the @@ -831,8 +841,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, vpn_gateway]) if request is not None and has_flattened_params: raise ValueError( @@ -840,10 +850,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteVpnGatewayRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteVpnGatewayRequest): request = compute.DeleteVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the @@ -959,8 +967,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, vpn_gateway]) if request is not None and has_flattened_params: raise ValueError( @@ -968,10 +976,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteVpnGatewayRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteVpnGatewayRequest): request = compute.DeleteVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1119,8 +1125,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, vpn_gateway]) if request is not None and has_flattened_params: raise ValueError( @@ -1128,10 +1134,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetVpnGatewayRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetVpnGatewayRequest): request = compute.GetVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1245,8 +1249,8 @@ def sample_get_status(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, vpn_gateway]) if request is not None and has_flattened_params: raise ValueError( @@ -1254,10 +1258,8 @@ def sample_get_status(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetStatusVpnGatewayRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetStatusVpnGatewayRequest): request = compute.GetStatusVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1373,8 +1375,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, vpn_gateway_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1382,10 +1384,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertVpnGatewayRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertVpnGatewayRequest): request = compute.InsertVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1500,8 +1500,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region, vpn_gateway_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1509,10 +1509,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertVpnGatewayRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertVpnGatewayRequest): request = compute.InsertVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1650,8 +1648,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1659,10 +1657,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListVpnGatewaysRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListVpnGatewaysRequest): request = compute.ListVpnGatewaysRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1795,8 +1791,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, region_set_labels_request_resource] ) @@ -1806,10 +1802,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsVpnGatewayRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsVpnGatewayRequest): request = compute.SetLabelsVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1940,8 +1934,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, region_set_labels_request_resource] ) @@ -1951,10 +1945,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsVpnGatewayRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.SetLabelsVpnGatewayRequest): request = compute.SetLabelsVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2112,8 +2104,8 @@ def sample_test_iam_permissions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, test_permissions_request_resource] ) @@ -2123,10 +2115,8 @@ def sample_test_iam_permissions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.TestIamPermissionsVpnGatewayRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.TestIamPermissionsVpnGatewayRequest): request = compute.TestIamPermissionsVpnGatewayRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/client.py index 99a1c5eb1d2b..4909a02a3909 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/vpn_tunnels/client.py @@ -18,6 +18,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -504,7 +505,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, VpnTunnelsTransport]] = None, + transport: Optional[ + Union[str, VpnTunnelsTransport, Callable[..., VpnTunnelsTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -516,9 +519,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, VpnTunnelsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,VpnTunnelsTransport,Callable[..., VpnTunnelsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the VpnTunnelsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. 
@@ -627,8 +632,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[VpnTunnelsTransport], Callable[..., VpnTunnelsTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., VpnTunnelsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -704,8 +716,8 @@ def sample_aggregated_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -713,10 +725,8 @@ def sample_aggregated_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.AggregatedListVpnTunnelsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.AggregatedListVpnTunnelsRequest): request = compute.AggregatedListVpnTunnelsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -833,8 +843,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, vpn_tunnel]) if request is not None and has_flattened_params: raise ValueError( @@ -842,10 +852,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteVpnTunnelRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteVpnTunnelRequest): request = compute.DeleteVpnTunnelRequest(request) # If we have keyword arguments corresponding to fields on the @@ -963,8 +971,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, vpn_tunnel]) if request is not None and has_flattened_params: raise ValueError( @@ -972,10 +980,8 @@ def sample_delete(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteVpnTunnelRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.DeleteVpnTunnelRequest): request = compute.DeleteVpnTunnelRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1118,8 +1124,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, vpn_tunnel]) if request is not None and has_flattened_params: raise ValueError( @@ -1127,10 +1133,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetVpnTunnelRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetVpnTunnelRequest): request = compute.GetVpnTunnelRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1246,8 +1250,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, vpn_tunnel_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1255,10 +1259,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertVpnTunnelRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertVpnTunnelRequest): request = compute.InsertVpnTunnelRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1373,8 +1375,8 @@ def sample_insert(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, region, vpn_tunnel_resource]) if request is not None and has_flattened_params: raise ValueError( @@ -1382,10 +1384,8 @@ def sample_insert(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.InsertVpnTunnelRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.InsertVpnTunnelRequest): request = compute.InsertVpnTunnelRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1523,8 +1523,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, region]) if request is not None and has_flattened_params: raise ValueError( @@ -1532,10 +1532,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListVpnTunnelsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListVpnTunnelsRequest): request = compute.ListVpnTunnelsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1668,8 +1666,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, region_set_labels_request_resource] ) @@ -1679,10 +1677,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsVpnTunnelRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsVpnTunnelRequest): request = compute.SetLabelsVpnTunnelRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1813,8 +1809,8 @@ def sample_set_labels(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any( [project, region, resource, region_set_labels_request_resource] ) @@ -1824,10 +1820,8 @@ def sample_set_labels(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.SetLabelsVpnTunnelRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.SetLabelsVpnTunnelRequest): request = compute.SetLabelsVpnTunnelRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py index 487cab7ffe98..18f77247415b 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_operations/client.py @@ -17,6 +17,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -503,7 +504,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ZoneOperationsTransport]] = None, + transport: Optional[ + Union[str, ZoneOperationsTransport, Callable[..., ZoneOperationsTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -515,9 +518,11 @@ def __init__( credentials identify the 
application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ZoneOperationsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,ZoneOperationsTransport,Callable[..., ZoneOperationsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ZoneOperationsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -629,8 +634,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[ZoneOperationsTransport], Callable[..., ZoneOperationsTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ZoneOperationsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -720,8 +732,8 @@ def sample_delete(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, operation]) if request is not None and has_flattened_params: raise ValueError( @@ -729,10 +741,8 @@ def sample_delete(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a compute.DeleteZoneOperationRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.DeleteZoneOperationRequest): request = compute.DeleteZoneOperationRequest(request) # If we have keyword arguments corresponding to fields on the @@ -866,8 +876,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, operation]) if request is not None and has_flattened_params: raise ValueError( @@ -875,10 +885,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetZoneOperationRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetZoneOperationRequest): request = compute.GetZoneOperationRequest(request) # If we have keyword arguments corresponding to fields on the @@ -992,8 +1000,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project, zone]) if request is not None and has_flattened_params: raise ValueError( @@ -1001,10 +1009,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListZoneOperationsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListZoneOperationsRequest): request = compute.ListZoneOperationsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1154,8 +1160,8 @@ def sample_wait(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone, operation]) if request is not None and has_flattened_params: raise ValueError( @@ -1163,10 +1169,8 @@ def sample_wait(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.WaitZoneOperationRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, compute.WaitZoneOperationRequest): request = compute.WaitZoneOperationRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/client.py index 6c612e1ffdc7..0e132ac94cb6 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/zones/client.py @@ -17,6 +17,7 @@ import os import re from typing import ( + Callable, Dict, Mapping, MutableMapping, @@ -501,7 +502,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ZonesTransport]] = None, + transport: Optional[ + Union[str, ZonesTransport, Callable[..., ZonesTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -513,9 +516,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ZonesTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,ZonesTransport,Callable[..., ZonesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ZonesTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. 
@@ -624,8 +629,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[ZonesTransport], Callable[..., ZonesTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ZonesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -707,8 +719,8 @@ def sample_get(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project, zone]) if request is not None and has_flattened_params: raise ValueError( @@ -716,10 +728,8 @@ def sample_get(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.GetZoneRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.GetZoneRequest): request = compute.GetZoneRequest(request) # If we have keyword arguments corresponding to fields on the @@ -822,8 +832,8 @@ def sample_list(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -831,10 +841,8 @@ def sample_list(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a compute.ListZonesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, compute.ListZonesRequest): request = compute.ListZonesRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py index 2b914fef6788..b5e1ad81fe09 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py @@ -79,6 +79,8 @@ AggregatedListServiceAttachmentsRequest, AggregatedListSslCertificatesRequest, AggregatedListSslPoliciesRequest, + AggregatedListStoragePoolsRequest, + AggregatedListStoragePoolTypesRequest, AggregatedListSubnetworksRequest, AggregatedListTargetHttpProxiesRequest, AggregatedListTargetHttpsProxiesRequest, @@ -163,6 +165,7 @@ BulkInsertRegionInstanceRequest, CacheInvalidationRule, CacheKeyPolicy, + CancelInstanceGroupManagerResizeRequestRequest, CircuitBreakers, CloneRulesFirewallPolicyRequest, CloneRulesNetworkFirewallPolicyRequest, @@ -205,6 +208,7 @@ DeleteHealthCheckRequest, DeleteImageRequest, DeleteInstanceGroupManagerRequest, + DeleteInstanceGroupManagerResizeRequestRequest, DeleteInstanceGroupRequest, DeleteInstanceRequest, DeleteInstancesInstanceGroupManagerRequest, @@ -259,6 +263,7 @@ DeleteSnapshotRequest, DeleteSslCertificateRequest, DeleteSslPolicyRequest, + DeleteStoragePoolRequest, DeleteSubnetworkRequest, 
DeleteTargetGrpcProxyRequest, DeleteTargetHttpProxyRequest, @@ -390,12 +395,15 @@ GetIamPolicyResourcePolicyRequest, GetIamPolicyServiceAttachmentRequest, GetIamPolicySnapshotRequest, + GetIamPolicyStoragePoolRequest, GetIamPolicySubnetworkRequest, GetImageFamilyViewRequest, GetImageRequest, GetInstanceGroupManagerRequest, + GetInstanceGroupManagerResizeRequestRequest, GetInstanceGroupRequest, GetInstanceRequest, + GetInstanceSettingRequest, GetInstanceTemplateRequest, GetInstantSnapshotRequest, GetInterconnectAttachmentRequest, @@ -464,6 +472,8 @@ GetSslCertificateRequest, GetSslPolicyRequest, GetStatusVpnGatewayRequest, + GetStoragePoolRequest, + GetStoragePoolTypeRequest, GetSubnetworkRequest, GetTargetGrpcProxyRequest, GetTargetHttpProxyRequest, @@ -540,6 +550,7 @@ InsertHealthCheckRequest, InsertImageRequest, InsertInstanceGroupManagerRequest, + InsertInstanceGroupManagerResizeRequestRequest, InsertInstanceGroupRequest, InsertInstanceRequest, InsertInstanceTemplateRequest, @@ -586,6 +597,7 @@ InsertSnapshotRequest, InsertSslCertificateRequest, InsertSslPolicyRequest, + InsertStoragePoolRequest, InsertSubnetworkRequest, InsertTargetGrpcProxyRequest, InsertTargetHttpProxyRequest, @@ -612,6 +624,10 @@ InstanceGroupManagerAutoHealingPolicy, InstanceGroupManagerInstanceLifecyclePolicy, InstanceGroupManagerList, + InstanceGroupManagerResizeRequest, + InstanceGroupManagerResizeRequestsListResponse, + InstanceGroupManagerResizeRequestStatus, + InstanceGroupManagerResizeRequestStatusLastAttempt, InstanceGroupManagersAbandonInstancesRequest, InstanceGroupManagersApplyUpdatesRequest, InstanceGroupManagersCreateInstancesRequest, @@ -651,6 +667,8 @@ InstanceReference, InstancesAddResourcePoliciesRequest, InstancesBulkInsertOperationMetadata, + InstanceSettings, + InstanceSettingsMetadata, InstancesGetEffectiveFirewallsResponse, InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy, InstancesRemoveResourcePoliciesRequest, @@ -723,6 +741,7 @@ 
ListBackendBucketsRequest, ListBackendServicesRequest, ListDisksRequest, + ListDisksStoragePoolsRequest, ListDiskTypesRequest, ListErrorsInstanceGroupManagersRequest, ListErrorsRegionInstanceGroupManagersRequest, @@ -738,6 +757,7 @@ ListGlobalPublicDelegatedPrefixesRequest, ListHealthChecksRequest, ListImagesRequest, + ListInstanceGroupManagerResizeRequestsRequest, ListInstanceGroupManagersRequest, ListInstanceGroupsRequest, ListInstancesInstanceGroupsRequest, @@ -806,6 +826,8 @@ ListSnapshotsRequest, ListSslCertificatesRequest, ListSslPoliciesRequest, + ListStoragePoolsRequest, + ListStoragePoolTypesRequest, ListSubnetworksRequest, ListTargetGrpcProxiesRequest, ListTargetHttpProxiesRequest, @@ -898,6 +920,7 @@ NodeGroupsAddNodesRequest, NodeGroupsDeleteNodesRequest, NodeGroupsListNodes, + NodeGroupsPerformMaintenanceRequest, NodeGroupsScopedList, NodeGroupsSetNodeTemplateRequest, NodeGroupsSimulateMaintenanceEventRequest, @@ -940,6 +963,7 @@ PatchHealthCheckRequest, PatchImageRequest, PatchInstanceGroupManagerRequest, + PatchInstanceSettingRequest, PatchInterconnectAttachmentRequest, PatchInterconnectRequest, PatchNetworkAttachmentRequest, @@ -981,6 +1005,7 @@ PathMatcher, PathRule, PerformMaintenanceInstanceRequest, + PerformMaintenanceNodeGroupRequest, PerInstanceConfig, Policy, PreconfiguredWafSet, @@ -1006,6 +1031,7 @@ PublicDelegatedPrefixPublicDelegatedSubPrefix, Quota, QuotaExceededInfo, + QuotaStatusWarning, RawDisk, RecreateInstancesInstanceGroupManagerRequest, RecreateInstancesRegionInstanceGroupManagerRequest, @@ -1207,6 +1233,7 @@ SetIamPolicyResourcePolicyRequest, SetIamPolicyServiceAttachmentRequest, SetIamPolicySnapshotRequest, + SetIamPolicyStoragePoolRequest, SetIamPolicySubnetworkRequest, SetInstanceTemplateInstanceGroupManagerRequest, SetInstanceTemplateRegionInstanceGroupManagerRequest, @@ -1307,6 +1334,17 @@ StopGroupAsyncReplicationDiskRequest, StopGroupAsyncReplicationRegionDiskRequest, StopInstanceRequest, + StoragePool, + 
StoragePoolAggregatedList, + StoragePoolDisk, + StoragePoolList, + StoragePoolListDisks, + StoragePoolResourceStatus, + StoragePoolsScopedList, + StoragePoolType, + StoragePoolTypeAggregatedList, + StoragePoolTypeList, + StoragePoolTypesScopedList, Subnetwork, SubnetworkAggregatedList, SubnetworkList, @@ -1390,6 +1428,7 @@ TestIamPermissionsResourcePolicyRequest, TestIamPermissionsServiceAttachmentRequest, TestIamPermissionsSnapshotRequest, + TestIamPermissionsStoragePoolRequest, TestIamPermissionsSubnetworkRequest, TestIamPermissionsVpnGatewayRequest, TestPermissionsRequest, @@ -1418,6 +1457,7 @@ UpdateReservationRequest, UpdateRouterRequest, UpdateShieldedInstanceConfigInstanceRequest, + UpdateStoragePoolRequest, UpdateUrlMapRequest, UrlMap, UrlMapList, @@ -1538,6 +1578,8 @@ "AggregatedListServiceAttachmentsRequest", "AggregatedListSslCertificatesRequest", "AggregatedListSslPoliciesRequest", + "AggregatedListStoragePoolsRequest", + "AggregatedListStoragePoolTypesRequest", "AggregatedListSubnetworksRequest", "AggregatedListTargetHttpProxiesRequest", "AggregatedListTargetHttpsProxiesRequest", @@ -1622,6 +1664,7 @@ "BulkInsertRegionInstanceRequest", "CacheInvalidationRule", "CacheKeyPolicy", + "CancelInstanceGroupManagerResizeRequestRequest", "CircuitBreakers", "CloneRulesFirewallPolicyRequest", "CloneRulesNetworkFirewallPolicyRequest", @@ -1664,6 +1707,7 @@ "DeleteHealthCheckRequest", "DeleteImageRequest", "DeleteInstanceGroupManagerRequest", + "DeleteInstanceGroupManagerResizeRequestRequest", "DeleteInstanceGroupRequest", "DeleteInstanceRequest", "DeleteInstancesInstanceGroupManagerRequest", @@ -1718,6 +1762,7 @@ "DeleteSnapshotRequest", "DeleteSslCertificateRequest", "DeleteSslPolicyRequest", + "DeleteStoragePoolRequest", "DeleteSubnetworkRequest", "DeleteTargetGrpcProxyRequest", "DeleteTargetHttpProxyRequest", @@ -1849,12 +1894,15 @@ "GetIamPolicyResourcePolicyRequest", "GetIamPolicyServiceAttachmentRequest", "GetIamPolicySnapshotRequest", + 
"GetIamPolicyStoragePoolRequest", "GetIamPolicySubnetworkRequest", "GetImageFamilyViewRequest", "GetImageRequest", "GetInstanceGroupManagerRequest", + "GetInstanceGroupManagerResizeRequestRequest", "GetInstanceGroupRequest", "GetInstanceRequest", + "GetInstanceSettingRequest", "GetInstanceTemplateRequest", "GetInstantSnapshotRequest", "GetInterconnectAttachmentRequest", @@ -1923,6 +1971,8 @@ "GetSslCertificateRequest", "GetSslPolicyRequest", "GetStatusVpnGatewayRequest", + "GetStoragePoolRequest", + "GetStoragePoolTypeRequest", "GetSubnetworkRequest", "GetTargetGrpcProxyRequest", "GetTargetHttpProxyRequest", @@ -1999,6 +2049,7 @@ "InsertHealthCheckRequest", "InsertImageRequest", "InsertInstanceGroupManagerRequest", + "InsertInstanceGroupManagerResizeRequestRequest", "InsertInstanceGroupRequest", "InsertInstanceRequest", "InsertInstanceTemplateRequest", @@ -2045,6 +2096,7 @@ "InsertSnapshotRequest", "InsertSslCertificateRequest", "InsertSslPolicyRequest", + "InsertStoragePoolRequest", "InsertSubnetworkRequest", "InsertTargetGrpcProxyRequest", "InsertTargetHttpProxyRequest", @@ -2071,6 +2123,10 @@ "InstanceGroupManagerAutoHealingPolicy", "InstanceGroupManagerInstanceLifecyclePolicy", "InstanceGroupManagerList", + "InstanceGroupManagerResizeRequest", + "InstanceGroupManagerResizeRequestsListResponse", + "InstanceGroupManagerResizeRequestStatus", + "InstanceGroupManagerResizeRequestStatusLastAttempt", "InstanceGroupManagersAbandonInstancesRequest", "InstanceGroupManagersApplyUpdatesRequest", "InstanceGroupManagersCreateInstancesRequest", @@ -2110,6 +2166,8 @@ "InstanceReference", "InstancesAddResourcePoliciesRequest", "InstancesBulkInsertOperationMetadata", + "InstanceSettings", + "InstanceSettingsMetadata", "InstancesGetEffectiveFirewallsResponse", "InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy", "InstancesRemoveResourcePoliciesRequest", @@ -2182,6 +2240,7 @@ "ListBackendBucketsRequest", "ListBackendServicesRequest", "ListDisksRequest", + 
"ListDisksStoragePoolsRequest", "ListDiskTypesRequest", "ListErrorsInstanceGroupManagersRequest", "ListErrorsRegionInstanceGroupManagersRequest", @@ -2197,6 +2256,7 @@ "ListGlobalPublicDelegatedPrefixesRequest", "ListHealthChecksRequest", "ListImagesRequest", + "ListInstanceGroupManagerResizeRequestsRequest", "ListInstanceGroupManagersRequest", "ListInstanceGroupsRequest", "ListInstancesInstanceGroupsRequest", @@ -2265,6 +2325,8 @@ "ListSnapshotsRequest", "ListSslCertificatesRequest", "ListSslPoliciesRequest", + "ListStoragePoolsRequest", + "ListStoragePoolTypesRequest", "ListSubnetworksRequest", "ListTargetGrpcProxiesRequest", "ListTargetHttpProxiesRequest", @@ -2357,6 +2419,7 @@ "NodeGroupsAddNodesRequest", "NodeGroupsDeleteNodesRequest", "NodeGroupsListNodes", + "NodeGroupsPerformMaintenanceRequest", "NodeGroupsScopedList", "NodeGroupsSetNodeTemplateRequest", "NodeGroupsSimulateMaintenanceEventRequest", @@ -2399,6 +2462,7 @@ "PatchHealthCheckRequest", "PatchImageRequest", "PatchInstanceGroupManagerRequest", + "PatchInstanceSettingRequest", "PatchInterconnectAttachmentRequest", "PatchInterconnectRequest", "PatchNetworkAttachmentRequest", @@ -2440,6 +2504,7 @@ "PathMatcher", "PathRule", "PerformMaintenanceInstanceRequest", + "PerformMaintenanceNodeGroupRequest", "PerInstanceConfig", "Policy", "PreconfiguredWafSet", @@ -2465,6 +2530,7 @@ "PublicDelegatedPrefixPublicDelegatedSubPrefix", "Quota", "QuotaExceededInfo", + "QuotaStatusWarning", "RawDisk", "RecreateInstancesInstanceGroupManagerRequest", "RecreateInstancesRegionInstanceGroupManagerRequest", @@ -2666,6 +2732,7 @@ "SetIamPolicyResourcePolicyRequest", "SetIamPolicyServiceAttachmentRequest", "SetIamPolicySnapshotRequest", + "SetIamPolicyStoragePoolRequest", "SetIamPolicySubnetworkRequest", "SetInstanceTemplateInstanceGroupManagerRequest", "SetInstanceTemplateRegionInstanceGroupManagerRequest", @@ -2766,6 +2833,17 @@ "StopGroupAsyncReplicationDiskRequest", "StopGroupAsyncReplicationRegionDiskRequest", 
"StopInstanceRequest", + "StoragePool", + "StoragePoolAggregatedList", + "StoragePoolDisk", + "StoragePoolList", + "StoragePoolListDisks", + "StoragePoolResourceStatus", + "StoragePoolsScopedList", + "StoragePoolType", + "StoragePoolTypeAggregatedList", + "StoragePoolTypeList", + "StoragePoolTypesScopedList", "Subnetwork", "SubnetworkAggregatedList", "SubnetworkList", @@ -2849,6 +2927,7 @@ "TestIamPermissionsResourcePolicyRequest", "TestIamPermissionsServiceAttachmentRequest", "TestIamPermissionsSnapshotRequest", + "TestIamPermissionsStoragePoolRequest", "TestIamPermissionsSubnetworkRequest", "TestIamPermissionsVpnGatewayRequest", "TestPermissionsRequest", @@ -2877,6 +2956,7 @@ "UpdateReservationRequest", "UpdateRouterRequest", "UpdateShieldedInstanceConfigInstanceRequest", + "UpdateStoragePoolRequest", "UpdateUrlMapRequest", "UrlMap", "UrlMapList", diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py b/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py index 77b466022994..89b4d61be105 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py @@ -89,6 +89,8 @@ "AggregatedListServiceAttachmentsRequest", "AggregatedListSslCertificatesRequest", "AggregatedListSslPoliciesRequest", + "AggregatedListStoragePoolTypesRequest", + "AggregatedListStoragePoolsRequest", "AggregatedListSubnetworksRequest", "AggregatedListTargetHttpProxiesRequest", "AggregatedListTargetHttpsProxiesRequest", @@ -172,6 +174,7 @@ "BulkInsertRegionInstanceRequest", "CacheInvalidationRule", "CacheKeyPolicy", + "CancelInstanceGroupManagerResizeRequestRequest", "CircuitBreakers", "CloneRulesFirewallPolicyRequest", "CloneRulesNetworkFirewallPolicyRequest", @@ -214,6 +217,7 @@ "DeleteHealthCheckRequest", "DeleteImageRequest", "DeleteInstanceGroupManagerRequest", + "DeleteInstanceGroupManagerResizeRequestRequest", "DeleteInstanceGroupRequest", 
"DeleteInstanceRequest", "DeleteInstanceTemplateRequest", @@ -268,6 +272,7 @@ "DeleteSnapshotRequest", "DeleteSslCertificateRequest", "DeleteSslPolicyRequest", + "DeleteStoragePoolRequest", "DeleteSubnetworkRequest", "DeleteTargetGrpcProxyRequest", "DeleteTargetHttpProxyRequest", @@ -400,12 +405,15 @@ "GetIamPolicyResourcePolicyRequest", "GetIamPolicyServiceAttachmentRequest", "GetIamPolicySnapshotRequest", + "GetIamPolicyStoragePoolRequest", "GetIamPolicySubnetworkRequest", "GetImageFamilyViewRequest", "GetImageRequest", "GetInstanceGroupManagerRequest", + "GetInstanceGroupManagerResizeRequestRequest", "GetInstanceGroupRequest", "GetInstanceRequest", + "GetInstanceSettingRequest", "GetInstanceTemplateRequest", "GetInstantSnapshotRequest", "GetInterconnectAttachmentRequest", @@ -474,6 +482,8 @@ "GetSslCertificateRequest", "GetSslPolicyRequest", "GetStatusVpnGatewayRequest", + "GetStoragePoolRequest", + "GetStoragePoolTypeRequest", "GetSubnetworkRequest", "GetTargetGrpcProxyRequest", "GetTargetHttpProxyRequest", @@ -549,6 +559,7 @@ "InsertHealthCheckRequest", "InsertImageRequest", "InsertInstanceGroupManagerRequest", + "InsertInstanceGroupManagerResizeRequestRequest", "InsertInstanceGroupRequest", "InsertInstanceRequest", "InsertInstanceTemplateRequest", @@ -595,6 +606,7 @@ "InsertSnapshotRequest", "InsertSslCertificateRequest", "InsertSslPolicyRequest", + "InsertStoragePoolRequest", "InsertSubnetworkRequest", "InsertTargetGrpcProxyRequest", "InsertTargetHttpProxyRequest", @@ -621,6 +633,10 @@ "InstanceGroupManagerAutoHealingPolicy", "InstanceGroupManagerInstanceLifecyclePolicy", "InstanceGroupManagerList", + "InstanceGroupManagerResizeRequest", + "InstanceGroupManagerResizeRequestStatus", + "InstanceGroupManagerResizeRequestStatusLastAttempt", + "InstanceGroupManagerResizeRequestsListResponse", "InstanceGroupManagerStatus", "InstanceGroupManagerStatusAllInstancesConfig", "InstanceGroupManagerStatusStateful", @@ -658,6 +674,8 @@ "InstanceProperties", 
"InstancePropertiesPatch", "InstanceReference", + "InstanceSettings", + "InstanceSettingsMetadata", "InstanceTemplate", "InstanceTemplateAggregatedList", "InstanceTemplateList", @@ -733,6 +751,7 @@ "ListBackendServicesRequest", "ListDiskTypesRequest", "ListDisksRequest", + "ListDisksStoragePoolsRequest", "ListErrorsInstanceGroupManagersRequest", "ListErrorsRegionInstanceGroupManagersRequest", "ListExternalVpnGatewaysRequest", @@ -747,6 +766,7 @@ "ListGlobalPublicDelegatedPrefixesRequest", "ListHealthChecksRequest", "ListImagesRequest", + "ListInstanceGroupManagerResizeRequestsRequest", "ListInstanceGroupManagersRequest", "ListInstanceGroupsRequest", "ListInstanceTemplatesRequest", @@ -815,6 +835,8 @@ "ListSnapshotsRequest", "ListSslCertificatesRequest", "ListSslPoliciesRequest", + "ListStoragePoolTypesRequest", + "ListStoragePoolsRequest", "ListSubnetworksRequest", "ListTargetGrpcProxiesRequest", "ListTargetHttpProxiesRequest", @@ -907,6 +929,7 @@ "NodeGroupsAddNodesRequest", "NodeGroupsDeleteNodesRequest", "NodeGroupsListNodes", + "NodeGroupsPerformMaintenanceRequest", "NodeGroupsScopedList", "NodeGroupsSetNodeTemplateRequest", "NodeGroupsSimulateMaintenanceEventRequest", @@ -949,6 +972,7 @@ "PatchHealthCheckRequest", "PatchImageRequest", "PatchInstanceGroupManagerRequest", + "PatchInstanceSettingRequest", "PatchInterconnectAttachmentRequest", "PatchInterconnectRequest", "PatchNetworkAttachmentRequest", @@ -991,6 +1015,7 @@ "PathRule", "PerInstanceConfig", "PerformMaintenanceInstanceRequest", + "PerformMaintenanceNodeGroupRequest", "Policy", "PreconfiguredWafSet", "PreservedState", @@ -1015,6 +1040,7 @@ "PublicDelegatedPrefixesScopedList", "Quota", "QuotaExceededInfo", + "QuotaStatusWarning", "RawDisk", "RecreateInstancesInstanceGroupManagerRequest", "RecreateInstancesRegionInstanceGroupManagerRequest", @@ -1217,6 +1243,7 @@ "SetIamPolicyResourcePolicyRequest", "SetIamPolicyServiceAttachmentRequest", "SetIamPolicySnapshotRequest", + 
"SetIamPolicyStoragePoolRequest", "SetIamPolicySubnetworkRequest", "SetInstanceTemplateInstanceGroupManagerRequest", "SetInstanceTemplateRegionInstanceGroupManagerRequest", @@ -1316,6 +1343,17 @@ "StopGroupAsyncReplicationDiskRequest", "StopGroupAsyncReplicationRegionDiskRequest", "StopInstanceRequest", + "StoragePool", + "StoragePoolAggregatedList", + "StoragePoolDisk", + "StoragePoolList", + "StoragePoolListDisks", + "StoragePoolResourceStatus", + "StoragePoolType", + "StoragePoolTypeAggregatedList", + "StoragePoolTypeList", + "StoragePoolTypesScopedList", + "StoragePoolsScopedList", "Subnetwork", "SubnetworkAggregatedList", "SubnetworkList", @@ -1399,6 +1437,7 @@ "TestIamPermissionsResourcePolicyRequest", "TestIamPermissionsServiceAttachmentRequest", "TestIamPermissionsSnapshotRequest", + "TestIamPermissionsStoragePoolRequest", "TestIamPermissionsSubnetworkRequest", "TestIamPermissionsVpnGatewayRequest", "TestPermissionsRequest", @@ -1427,6 +1466,7 @@ "UpdateReservationRequest", "UpdateRouterRequest", "UpdateShieldedInstanceConfigInstanceRequest", + "UpdateStoragePoolRequest", "UpdateUrlMapRequest", "UrlMap", "UrlMapList", @@ -2099,7 +2139,7 @@ class AccessConfig(proto.Message): The DNS domain name for the public PTR record. You can set this field only if the ``setPublicPtr`` field is enabled in accessConfig. If this field is unspecified in - ipv6AccessConfig, a default PTR record will be createc for + ipv6AccessConfig, a default PTR record will be created for first IP in associated external IPv6 range. This field is a member of `oneof`_ ``_public_ptr_domain_name``. @@ -9017,6 +9057,314 @@ class AggregatedListSslPoliciesRequest(proto.Message): ) +class AggregatedListStoragePoolTypesRequest(proto.Message): + r"""A request message for StoragePoolTypes.AggregatedList. See + the method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). 
Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. For example, when + partial success behavior is enabled, + aggregatedList for a single zone scope either + returns all resources in the zone or no + resources, with an error code. + + This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + The Shared VPC service project id or service + project number for which aggregated list request + is invoked for subnetworks list-usable api. + + This field is a member of `oneof`_ ``_service_project_number``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) + + +class AggregatedListStoragePoolsRequest(proto.Message): + r"""A request message for StoragePools.AggregatedList. 
See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. 
For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. 
By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. For example, when + partial success behavior is enabled, + aggregatedList for a single zone scope either + returns all resources in the zone or no + resources, with an error code. + + This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + The Shared VPC service project id or service + project number for which aggregated list request + is invoked for subnetworks list-usable api. + + This field is a member of `oneof`_ ``_service_project_number``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) + + class AggregatedListSubnetworksRequest(proto.Message): r"""A request message for Subnetworks.AggregatedList. See the method description for details. @@ -10905,7 +11253,7 @@ class AllocationSpecificSKUAllocationReservedInstanceProperties(proto.Message): class AllocationSpecificSKUReservation(proto.Message): r"""This reservation type allows to pre allocate specific - instance configuration. Next ID: 6 + instance configuration. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -11962,6 +12310,17 @@ class AttachedDiskInitializeParams(proto.Message): source snapshot. This field is a member of `oneof`_ ``_source_snapshot_encryption_key``. + storage_pool (str): + The storage pool in which the new disk is + created. You can provide this as a partial or + full URL to the resource. For example, the + following are valid values: - + https://www.googleapis.com/compute/v1/projects/project/zones/zone + /storagePools/storagePool - + projects/project/zones/zone/storagePools/storagePool + - zones/zone/storagePools/storagePool + + This field is a member of `oneof`_ ``_storage_pool``. 
""" class Architecture(proto.Enum): @@ -12097,6 +12456,11 @@ class OnUpdateAction(proto.Enum): optional=True, message="CustomerEncryptionKey", ) + storage_pool: str = proto.Field( + proto.STRING, + number=360473440, + optional=True, + ) class AuditConfig(proto.Message): @@ -17523,6 +17887,72 @@ class CacheKeyPolicy(proto.Message): ) +class CancelInstanceGroupManagerResizeRequestRequest(proto.Message): + r"""A request message for + InstanceGroupManagerResizeRequests.Cancel. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + The name of the managed instance group. The + name should conform to RFC1035 or be a resource + ID. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + resize_request (str): + The name of the resize request to cancel. The + name should conform to RFC1035 or be a resource + ID. + zone (str): + The name of the zone where the managed + instance group is located. The name should + conform to RFC1035. 
+ """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + resize_request: str = proto.Field( + proto.STRING, + number=216941060, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + class CircuitBreakers(proto.Message): r"""Settings controlling the volume of requests, connections and retries to this backend service. @@ -18006,6 +18436,8 @@ class Type(proto.Enum): No description available. ACCELERATOR_OPTIMIZED_A3 (158574526): No description available. + ACCELERATOR_OPTIMIZED_A3_MEGA (156517459): + No description available. COMPUTE_OPTIMIZED (158349023): No description available. COMPUTE_OPTIMIZED_C2D (383246453): @@ -18024,6 +18456,8 @@ class Type(proto.Enum): No description available. GENERAL_PURPOSE_N2D (232471400): No description available. + GENERAL_PURPOSE_N4 (301912158): + No description available. GENERAL_PURPOSE_T2D (232477166): No description available. GRAPHICS_OPTIMIZED (68500563): @@ -18040,6 +18474,7 @@ class Type(proto.Enum): UNDEFINED_TYPE = 0 ACCELERATOR_OPTIMIZED = 280848403 ACCELERATOR_OPTIMIZED_A3 = 158574526 + ACCELERATOR_OPTIMIZED_A3_MEGA = 156517459 COMPUTE_OPTIMIZED = 158349023 COMPUTE_OPTIMIZED_C2D = 383246453 COMPUTE_OPTIMIZED_C3 = 428004784 @@ -18049,6 +18484,7 @@ class Type(proto.Enum): GENERAL_PURPOSE_E2 = 301911877 GENERAL_PURPOSE_N2 = 301912156 GENERAL_PURPOSE_N2D = 232471400 + GENERAL_PURPOSE_N4 = 301912158 GENERAL_PURPOSE_T2D = 232477166 GRAPHICS_OPTIMIZED = 68500563 MEMORY_OPTIMIZED = 281753417 @@ -18672,21 +19108,20 @@ class CorsPolicy(proto.Message): Access-Control-Allow-Methods header. allow_origin_regexes (MutableSequence[str]): Specifies a regular expression that matches allowed origins. - For more information about the regular expression syntax, - see Syntax. 
An origin is allowed if it matches either an - item in allowOrigins or an item in allowOriginRegexes. - Regular expressions can only be used when the - loadBalancingScheme is set to INTERNAL_SELF_MANAGED. + For more information, see regular expression syntax . An + origin is allowed if it matches either an item in + allowOrigins or an item in allowOriginRegexes. Regular + expressions can only be used when the loadBalancingScheme is + set to INTERNAL_SELF_MANAGED. allow_origins (MutableSequence[str]): Specifies the list of origins that is allowed to do CORS requests. An origin is allowed if it matches either an item in allowOrigins or an item in allowOriginRegexes. disabled (bool): - If true, the setting specifies the CORS - policy is disabled. The default value of false, - which indicates that the CORS policy is in - effect. + If true, disables the CORS policy. The + default value is false, which indicates that the + CORS policy is in effect. This field is a member of `oneof`_ ``_disabled``. expose_headers (MutableSequence[str]): @@ -20062,6 +20497,72 @@ class DeleteInstanceGroupManagerRequest(proto.Message): ) +class DeleteInstanceGroupManagerResizeRequestRequest(proto.Message): + r"""A request message for + InstanceGroupManagerResizeRequests.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + The name of the managed instance group. The + name should conform to RFC1035 or be a resource + ID. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + resize_request (str): + The name of the resize request to delete. The + name should conform to RFC1035 or be a resource + ID. + zone (str): + The name of the zone where the managed + instance group is located. The name should + conform to RFC1035. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + resize_request: str = proto.Field( + proto.STRING, + number=216941060, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + class DeleteInstanceGroupRequest(proto.Message): r"""A request message for InstanceGroups.Delete. See the method description for details. @@ -22794,6 +23295,59 @@ class DeleteSslPolicyRequest(proto.Message): ) +class DeleteStoragePoolRequest(proto.Message): + r"""A request message for StoragePools.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. 
If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + storage_pool (str): + Name of the storage pool to delete. + zone (str): + The name of the zone for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + storage_pool: str = proto.Field( + proto.STRING, + number=360473440, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + class DeleteSubnetworkRequest(proto.Message): r"""A request message for Subnetworks.Delete. See the method description for details. @@ -24314,6 +24868,17 @@ class Disk(proto.Message): Status enum for the list of possible values. This field is a member of `oneof`_ ``_status``. + storage_pool (str): + The storage pool in which the new disk is + created. You can provide this as a partial or + full URL to the resource. For example, the + following are valid values: - + https://www.googleapis.com/compute/v1/projects/project/zones/zone + /storagePools/storagePool - + projects/project/zones/zone/storagePools/storagePool + - zones/zone/storagePools/storagePool + + This field is a member of `oneof`_ ``_storage_pool``. type_ (str): URL of the disk type resource describing which disk type to use to create the disk. @@ -24375,6 +24940,9 @@ class Status(proto.Enum): Disk is ready for use. RESTORING (404263851): Source data is being copied into the disk. + UNAVAILABLE (413756464): + Disk is currently unavailable and cannot be + accessed, attached or detached. 
""" UNDEFINED_STATUS = 0 CREATING = 455564985 @@ -24382,6 +24950,7 @@ class Status(proto.Enum): FAILED = 455706685 READY = 77848963 RESTORING = 404263851 + UNAVAILABLE = 413756464 architecture: str = proto.Field( proto.STRING, @@ -24613,6 +25182,11 @@ class Status(proto.Enum): number=181260274, optional=True, ) + storage_pool: str = proto.Field( + proto.STRING, + number=360473440, + optional=True, + ) type_: str = proto.Field( proto.STRING, number=3575610, @@ -25836,8 +26410,7 @@ class EnableXpnResourceProjectRequest(proto.Message): class Error(proto.Message): - r"""[Output Only] If errors are generated during processing of the - operation, this field will be populated. + r"""Errors that prevented the ResizeRequest to be fulfilled. Attributes: errors (MutableSequence[google.cloud.compute_v1.types.Errors]): @@ -25928,15 +26501,17 @@ class ErrorInfo(proto.Message): This field is a member of `oneof`_ ``_domain``. metadatas (MutableMapping[str, str]): - Additional structured details about this error. Keys should - match `[a-zA-Z0-9-_]` and be limited to 64 characters in - length. When identifying the current value of an exceeded - limit, the units should be contained in the key, not the - value. For example, rather than {"instanceLimit": + Additional structured details about this + error. Keys must match /a-z+/ but should ideally + be lowerCamelCase. Also they must be limited to + 64 characters in length. When identifying the + current value of an exceeded limit, the units + should be contained in the key, not the value. + For example, rather than {"instanceLimit": "100/request"}, should be returned as, - {"instanceLimitPerRequest": "100"}, if the client exceeds - the number of instances that can be created in a single - (batch) request. + {"instanceLimitPerRequest": "100"}, if the + client exceeds the number of instances that can + be created in a single (batch) request. reason (str): The reason of the error. 
This is a constant value that identifies the proximate cause of the error. Error reasons @@ -28020,6 +28595,18 @@ class ForwardingRule(proto.Message): identifier is defined by the server. This field is a member of `oneof`_ ``_id``. + ip_collection (str): + Resource reference of a PublicDelegatedPrefix. The PDP must + be a sub-PDP in EXTERNAL_IPV6_FORWARDING_RULE_CREATION mode. + Use one of the following formats to specify a sub-PDP when + creating an IPv6 NetLB forwarding rule using BYOIP: Full + resource URL, as in + https://www.googleapis.com/compute/v1/projects/project_id/regions/region + /publicDelegatedPrefixes/sub-pdp-name Partial URL, as in: - + projects/project_id/regions/region/publicDelegatedPrefixes/sub-pdp-name + - regions/region/publicDelegatedPrefixes/sub-pdp-name + + This field is a member of `oneof`_ ``_ip_collection``. ip_version (str): The IP Version that will be used by this forwarding rule. Valid options are IPV4 or IPV6. @@ -28474,6 +29061,11 @@ class PscConnectionStatus(proto.Enum): number=3355, optional=True, ) + ip_collection: str = proto.Field( + proto.STRING, + number=176818358, + optional=True, + ) ip_version: str = proto.Field( proto.STRING, number=294959552, @@ -30565,6 +31157,45 @@ class GetIamPolicySnapshotRequest(proto.Message): ) +class GetIamPolicyStoragePoolRequest(proto.Message): + r"""A request message for StoragePools.GetIamPolicy. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + zone (str): + The name of the zone for this request. 
+ """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + class GetIamPolicySubnetworkRequest(proto.Message): r"""A request message for Subnetworks.GetIamPolicy. See the method description for details. @@ -30680,6 +31311,44 @@ class GetInstanceGroupManagerRequest(proto.Message): ) +class GetInstanceGroupManagerResizeRequestRequest(proto.Message): + r"""A request message for InstanceGroupManagerResizeRequests.Get. + See the method description for details. + + Attributes: + instance_group_manager (str): + The name of the managed instance group. Name + should conform to RFC1035 or be a resource ID. + project (str): + Project ID for this request. + resize_request (str): + The name of the resize request. Name should + conform to RFC1035 or be a resource ID. + zone (str): + Name of the + href="/compute/docs/regions-zones/#available">zone + scoping this request. Name should conform to + RFC1035. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resize_request: str = proto.Field( + proto.STRING, + number=216941060, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + class GetInstanceGroupRequest(proto.Message): r"""A request message for InstanceGroups.Get. See the method description for details. @@ -30735,6 +31404,27 @@ class GetInstanceRequest(proto.Message): ) +class GetInstanceSettingRequest(proto.Message): + r"""A request message for InstanceSettingsService.Get. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + zone (str): + Name of the zone for this request. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + class GetInstanceTemplateRequest(proto.Message): r"""A request message for InstanceTemplates.Get. See the method description for details. @@ -32700,6 +33390,60 @@ class GetStatusVpnGatewayRequest(proto.Message): ) +class GetStoragePoolRequest(proto.Message): + r"""A request message for StoragePools.Get. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + storage_pool (str): + Name of the storage pool to return. + zone (str): + The name of the zone for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + storage_pool: str = proto.Field( + proto.STRING, + number=360473440, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetStoragePoolTypeRequest(proto.Message): + r"""A request message for StoragePoolTypes.Get. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + storage_pool_type (str): + Name of the storage pool type to return. + zone (str): + The name of the zone for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + storage_pool_type: str = proto.Field( + proto.STRING, + number=285999289, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + class GetSubnetworkRequest(proto.Message): r"""A request message for Subnetworks.Get. See the method description for details. @@ -33508,7 +34252,7 @@ class GuestOsFeature(proto.Message): commas to separate values. 
Set to one or more of the following values: - VIRTIO_SCSI_MULTIQUEUE - WINDOWS - MULTI_IP_SUBNET - UEFI_COMPATIBLE - GVNIC - SEV_CAPABLE - - SUSPEND_RESUME_COMPATIBLE - SEV_LIVE_MIGRATABLE - + SUSPEND_RESUME_COMPATIBLE - SEV_LIVE_MIGRATABLE_V2 - SEV_SNP_CAPABLE - TDX_CAPABLE - IDPF For more information, see Enabling guest operating system features. Check the Type enum for the list of possible values. @@ -33521,8 +34265,8 @@ class Type(proto.Enum): separate values. Set to one or more of the following values: - VIRTIO_SCSI_MULTIQUEUE - WINDOWS - MULTI_IP_SUBNET - UEFI_COMPATIBLE - GVNIC - SEV_CAPABLE - SUSPEND_RESUME_COMPATIBLE - - SEV_LIVE_MIGRATABLE - SEV_SNP_CAPABLE - TDX_CAPABLE - IDPF For more - information, see Enabling guest operating system features. + SEV_LIVE_MIGRATABLE_V2 - SEV_SNP_CAPABLE - TDX_CAPABLE - IDPF For + more information, see Enabling guest operating system features. Values: UNDEFINED_TYPE (0): @@ -33637,7 +34381,8 @@ class HTTP2HealthCheck(proto.Message): This field is a member of `oneof`_ ``_proxy_header``. request_path (str): The request path of the HTTP/2 health check - request. The default value is /. + request. The default value is /. Must comply + with RFC3986. This field is a member of `oneof`_ ``_request_path``. response (str): @@ -33817,7 +34562,8 @@ class HTTPHealthCheck(proto.Message): This field is a member of `oneof`_ ``_proxy_header``. request_path (str): The request path of the HTTP health check - request. The default value is /. + request. The default value is /. Must comply + with RFC3986. This field is a member of `oneof`_ ``_request_path``. response (str): @@ -33996,7 +34742,8 @@ class HTTPSHealthCheck(proto.Message): This field is a member of `oneof`_ ``_proxy_header``. request_path (str): The request path of the HTTPS health check - request. The default value is /. + request. The default value is /. Must comply + with RFC3986. This field is a member of `oneof`_ ``_request_path``. 
response (str): @@ -37688,6 +38435,72 @@ class InsertInstanceGroupManagerRequest(proto.Message): ) +class InsertInstanceGroupManagerResizeRequestRequest(proto.Message): + r"""A request message for + InstanceGroupManagerResizeRequests.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + The name of the managed instance group to + which the resize request will be added. Name + should conform to RFC1035 or be a resource ID. + instance_group_manager_resize_request_resource (google.cloud.compute_v1.types.InstanceGroupManagerResizeRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone where the managed + instance group is located and where the resize + request will be created. Name should conform to + RFC1035. 
+ """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + instance_group_manager_resize_request_resource: "InstanceGroupManagerResizeRequest" = proto.Field( + proto.MESSAGE, + number=468541293, + message="InstanceGroupManagerResizeRequest", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + class InsertInstanceGroupRequest(proto.Message): r"""A request message for InstanceGroups.Insert. See the method description for details. @@ -40185,8 +40998,8 @@ class InsertSslPolicyRequest(proto.Message): ) -class InsertSubnetworkRequest(proto.Message): - r"""A request message for Subnetworks.Insert. See the method +class InsertStoragePoolRequest(proto.Message): + r"""A request message for StoragePools.Insert. See the method description for details. @@ -40195,8 +41008,6 @@ class InsertSubnetworkRequest(proto.Message): Attributes: project (str): Project ID for this request. - region (str): - Name of the region scoping this request. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -40215,81 +41026,35 @@ class InsertSubnetworkRequest(proto.Message): 00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - subnetwork_resource (google.cloud.compute_v1.types.Subnetwork): + storage_pool_resource (google.cloud.compute_v1.types.StoragePool): The body resource for this request + zone (str): + The name of the zone for this request. 
""" project: str = proto.Field( proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) request_id: str = proto.Field( proto.STRING, number=37109963, optional=True, ) - subnetwork_resource: "Subnetwork" = proto.Field( + storage_pool_resource: "StoragePool" = proto.Field( proto.MESSAGE, - number=42233151, - message="Subnetwork", - ) - - -class InsertTargetGrpcProxyRequest(proto.Message): - r"""A request message for TargetGrpcProxies.Insert. See the - method description for details. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - project (str): - Project ID for this request. - request_id (str): - An optional request ID to identify requests. - Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. For example, consider a situation - where you make an initial request and the - request times out. If you make the request again - with the same request ID, the server can check - if original operation with the same request ID - was received, and if so, will ignore the second - request. This prevents clients from accidentally - creating duplicate commitments. The request ID - must be a valid UUID with the exception that - zero UUID is not supported ( - 00000000-0000-0000-0000-000000000000). - - This field is a member of `oneof`_ ``_request_id``. 
- target_grpc_proxy_resource (google.cloud.compute_v1.types.TargetGrpcProxy): - The body resource for this request - """ - - project: str = proto.Field( - proto.STRING, - number=227560217, + number=157179405, + message="StoragePool", ) - request_id: str = proto.Field( + zone: str = proto.Field( proto.STRING, - number=37109963, - optional=True, - ) - target_grpc_proxy_resource: "TargetGrpcProxy" = proto.Field( - proto.MESSAGE, - number=328922450, - message="TargetGrpcProxy", + number=3744684, ) -class InsertTargetHttpProxyRequest(proto.Message): - r"""A request message for TargetHttpProxies.Insert. See the - method description for details. +class InsertSubnetworkRequest(proto.Message): + r"""A request message for Subnetworks.Insert. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -40297,6 +41062,8 @@ class InsertTargetHttpProxyRequest(proto.Message): Attributes: project (str): Project ID for this request. + region (str): + Name of the region scoping this request. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -40315,7 +41082,7 @@ class InsertTargetHttpProxyRequest(proto.Message): 00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - target_http_proxy_resource (google.cloud.compute_v1.types.TargetHttpProxy): + subnetwork_resource (google.cloud.compute_v1.types.Subnetwork): The body resource for this request """ @@ -40323,69 +41090,25 @@ class InsertTargetHttpProxyRequest(proto.Message): proto.STRING, number=227560217, ) - request_id: str = proto.Field( - proto.STRING, - number=37109963, - optional=True, - ) - target_http_proxy_resource: "TargetHttpProxy" = proto.Field( - proto.MESSAGE, - number=24696744, - message="TargetHttpProxy", - ) - - -class InsertTargetHttpsProxyRequest(proto.Message): - r"""A request message for TargetHttpsProxies.Insert. 
See the - method description for details. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - project (str): - Project ID for this request. - request_id (str): - An optional request ID to identify requests. - Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. For example, consider a situation - where you make an initial request and the - request times out. If you make the request again - with the same request ID, the server can check - if original operation with the same request ID - was received, and if so, will ignore the second - request. This prevents clients from accidentally - creating duplicate commitments. The request ID - must be a valid UUID with the exception that - zero UUID is not supported ( - 00000000-0000-0000-0000-000000000000). - - This field is a member of `oneof`_ ``_request_id``. - target_https_proxy_resource (google.cloud.compute_v1.types.TargetHttpsProxy): - The body resource for this request - """ - - project: str = proto.Field( + region: str = proto.Field( proto.STRING, - number=227560217, + number=138946292, ) request_id: str = proto.Field( proto.STRING, number=37109963, optional=True, ) - target_https_proxy_resource: "TargetHttpsProxy" = proto.Field( + subnetwork_resource: "Subnetwork" = proto.Field( proto.MESSAGE, - number=433657473, - message="TargetHttpsProxy", + number=42233151, + message="Subnetwork", ) -class InsertTargetInstanceRequest(proto.Message): - r"""A request message for TargetInstances.Insert. See the method - description for details. +class InsertTargetGrpcProxyRequest(proto.Message): + r"""A request message for TargetGrpcProxies.Insert. See the + method description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -40411,10 +41134,8 @@ class InsertTargetInstanceRequest(proto.Message): 00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - target_instance_resource (google.cloud.compute_v1.types.TargetInstance): + target_grpc_proxy_resource (google.cloud.compute_v1.types.TargetGrpcProxy): The body resource for this request - zone (str): - Name of the zone scoping this request. """ project: str = proto.Field( @@ -40426,20 +41147,16 @@ class InsertTargetInstanceRequest(proto.Message): number=37109963, optional=True, ) - target_instance_resource: "TargetInstance" = proto.Field( + target_grpc_proxy_resource: "TargetGrpcProxy" = proto.Field( proto.MESSAGE, - number=430453066, - message="TargetInstance", - ) - zone: str = proto.Field( - proto.STRING, - number=3744684, + number=328922450, + message="TargetGrpcProxy", ) -class InsertTargetPoolRequest(proto.Message): - r"""A request message for TargetPools.Insert. See the method - description for details. +class InsertTargetHttpProxyRequest(proto.Message): + r"""A request message for TargetHttpProxies.Insert. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -40447,8 +41164,6 @@ class InsertTargetPoolRequest(proto.Message): Attributes: project (str): Project ID for this request. - region (str): - Name of the region scoping this request. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -40467,7 +41182,7 @@ class InsertTargetPoolRequest(proto.Message): 00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. 
- target_pool_resource (google.cloud.compute_v1.types.TargetPool): + target_http_proxy_resource (google.cloud.compute_v1.types.TargetHttpProxy): The body resource for this request """ @@ -40475,25 +41190,177 @@ class InsertTargetPoolRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) request_id: str = proto.Field( proto.STRING, number=37109963, optional=True, ) - target_pool_resource: "TargetPool" = proto.Field( + target_http_proxy_resource: "TargetHttpProxy" = proto.Field( proto.MESSAGE, - number=101281443, - message="TargetPool", + number=24696744, + message="TargetHttpProxy", ) -class InsertTargetSslProxyRequest(proto.Message): - r"""A request message for TargetSslProxies.Insert. See the method - description for details. +class InsertTargetHttpsProxyRequest(proto.Message): + r"""A request message for TargetHttpsProxies.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ target_https_proxy_resource (google.cloud.compute_v1.types.TargetHttpsProxy): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_https_proxy_resource: "TargetHttpsProxy" = proto.Field( + proto.MESSAGE, + number=433657473, + message="TargetHttpsProxy", + ) + + +class InsertTargetInstanceRequest(proto.Message): + r"""A request message for TargetInstances.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_instance_resource (google.cloud.compute_v1.types.TargetInstance): + The body resource for this request + zone (str): + Name of the zone scoping this request. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_instance_resource: "TargetInstance" = proto.Field( + proto.MESSAGE, + number=430453066, + message="TargetInstance", + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class InsertTargetPoolRequest(proto.Message): + r"""A request message for TargetPools.Insert. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ target_pool_resource (google.cloud.compute_v1.types.TargetPool): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_pool_resource: "TargetPool" = proto.Field( + proto.MESSAGE, + number=101281443, + message="TargetPool", + ) + + +class InsertTargetSslProxyRequest(proto.Message): + r"""A request message for TargetSslProxies.Insert. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -42726,6 +43593,319 @@ def raw_page(self): ) +class InstanceGroupManagerResizeRequest(proto.Message): + r"""InstanceGroupManagerResizeRequest represents a request to + create a number of VMs: either immediately or by queuing the + request for the specified time. This resize request is nested + under InstanceGroupManager and the VMs created by this request + are added to the owning InstanceGroupManager. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] The creation timestamp for this resize request + in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + + This field is a member of `oneof`_ ``_description``. + id (int): + [Output Only] A unique identifier for this resource type. + The server generates this identifier. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] The resource type, which is always + compute#instanceGroupManagerResizeRequest for resize + requests. + + This field is a member of `oneof`_ ``_kind``. + name (str): + The name of this resize request. 
The name + must be 1-63 characters long, and comply with + RFC1035. + + This field is a member of `oneof`_ ``_name``. + requested_run_duration (google.cloud.compute_v1.types.Duration): + Requested run duration for instances that + will be created by this request. At the end of + the run duration instance will be deleted. + + This field is a member of `oneof`_ ``_requested_run_duration``. + resize_by (int): + The number of instances to be created by this + resize request. The group's target size will be + increased by this number. + + This field is a member of `oneof`_ ``_resize_by``. + self_link (str): + [Output Only] The URL for this resize request. The server + defines this URL. + + This field is a member of `oneof`_ ``_self_link``. + self_link_with_id (str): + [Output Only] Server-defined URL for this resource with the + resource id. + + This field is a member of `oneof`_ ``_self_link_with_id``. + state (str): + [Output only] Current state of the request. Check the State + enum for the list of possible values. + + This field is a member of `oneof`_ ``_state``. + status (google.cloud.compute_v1.types.InstanceGroupManagerResizeRequestStatus): + [Output only] Status of the request. + + This field is a member of `oneof`_ ``_status``. + zone (str): + [Output Only] The URL of a zone where the resize request is + located. Populated only for zonal resize requests. + + This field is a member of `oneof`_ ``_zone``. + """ + + class State(proto.Enum): + r"""[Output only] Current state of the request. + + Values: + UNDEFINED_STATE (0): + A value indicating that the enum field is not + set. + ACCEPTED (246714279): + The request was created successfully and was + accepted for provisioning when the capacity + becomes available. + CANCELLED (41957681): + The request is cancelled. + CREATING (455564985): + Resize request is being created and may still + fail creation. + FAILED (455706685): + The request failed before or during + provisioning. 
If the request fails during + provisioning, any VMs that were created during + provisioning are rolled back and removed from + the MIG. + STATE_UNSPECIFIED (470755401): + Default value. This value should never be + returned. + SUCCEEDED (511103553): + The request succeeded. + """ + UNDEFINED_STATE = 0 + ACCEPTED = 246714279 + CANCELLED = 41957681 + CREATING = 455564985 + FAILED = 455706685 + STATE_UNSPECIFIED = 470755401 + SUCCEEDED = 511103553 + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + requested_run_duration: "Duration" = proto.Field( + proto.MESSAGE, + number=232146425, + optional=True, + message="Duration", + ) + resize_by: int = proto.Field( + proto.INT32, + number=533735362, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + self_link_with_id: str = proto.Field( + proto.STRING, + number=44520962, + optional=True, + ) + state: str = proto.Field( + proto.STRING, + number=109757585, + optional=True, + ) + status: "InstanceGroupManagerResizeRequestStatus" = proto.Field( + proto.MESSAGE, + number=181260274, + optional=True, + message="InstanceGroupManagerResizeRequestStatus", + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + optional=True, + ) + + +class InstanceGroupManagerResizeRequestStatus(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + error (google.cloud.compute_v1.types.Error): + [Output only] Fatal errors encountered during the queueing + or provisioning phases of the ResizeRequest that caused the + transition to the FAILED state. Contrary to the last_attempt + errors, this field is final and errors are never removed + from here, as the ResizeRequest is not going to retry. + + This field is a member of `oneof`_ ``_error``. + last_attempt (google.cloud.compute_v1.types.InstanceGroupManagerResizeRequestStatusLastAttempt): + [Output only] Information about the last attempt to fulfill + the request. The value is temporary since the ResizeRequest + can retry, as long as it's still active and the last attempt + value can either be cleared or replaced with a different + error. Since ResizeRequest retries infrequently, the value + may be stale and no longer show an active problem. The value + is cleared when ResizeRequest transitions to the final state + (becomes inactive). If the final state is FAILED the error + describing it will be storred in the "error" field only. + + This field is a member of `oneof`_ ``_last_attempt``. + """ + + error: "Error" = proto.Field( + proto.MESSAGE, + number=96784904, + optional=True, + message="Error", + ) + last_attempt: "InstanceGroupManagerResizeRequestStatusLastAttempt" = proto.Field( + proto.MESSAGE, + number=434771492, + optional=True, + message="InstanceGroupManagerResizeRequestStatusLastAttempt", + ) + + +class InstanceGroupManagerResizeRequestStatusLastAttempt(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + error (google.cloud.compute_v1.types.Error): + Errors that prevented the ResizeRequest to be + fulfilled. + + This field is a member of `oneof`_ ``_error``. 
+ """ + + error: "Error" = proto.Field( + proto.MESSAGE, + number=96784904, + optional=True, + message="Error", + ) + + +class InstanceGroupManagerResizeRequestsListResponse(proto.Message): + r"""[Output Only] A list of resize requests. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.InstanceGroupManagerResizeRequest]): + A list of resize request resources. + kind (str): + [Output Only] Type of the resource. Always + compute#instanceGroupManagerResizeRequestList for a list of + resize requests. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence["InstanceGroupManagerResizeRequest"] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message="InstanceGroupManagerResizeRequest", + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: "Warning" = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message="Warning", + ) + + class InstanceGroupManagerStatus(proto.Message): r""" @@ -42843,7 +44023,7 @@ class InstanceGroupManagerStatusStateful(proto.Message): This field is a member of `oneof`_ ``_has_stateful_config``. per_instance_configs (google.cloud.compute_v1.types.InstanceGroupManagerStatusStatefulPerInstanceConfigs): [Output Only] Status of per-instance configurations on the - instance. + instances. This field is a member of `oneof`_ ``_per_instance_configs``. """ @@ -44662,6 +45842,101 @@ class InstanceReference(proto.Message): ) +class InstanceSettings(proto.Message): + r"""Represents a Instance Settings resource. You can use instance + settings to configure default settings for Compute Engine VM + instances. For example, you can use it to configure default + machine type of Compute Engine VM instances. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + fingerprint (str): + Specifies a fingerprint for instance + settings, which is essentially a hash of the + instance settings resource's contents and used + for optimistic locking. The fingerprint is + initially generated by Compute Engine and + changes after every request to modify or update + the instance settings resource. 
You must always + provide an up-to-date fingerprint hash in order + to update or change the resource, otherwise the + request will fail with error 412 + conditionNotMet. To see the latest fingerprint, + make a get() request to retrieve the resource. + + This field is a member of `oneof`_ ``_fingerprint``. + kind (str): + [Output Only] Type of the resource. Always + compute#instance_settings for instance settings. + + This field is a member of `oneof`_ ``_kind``. + metadata (google.cloud.compute_v1.types.InstanceSettingsMetadata): + The metadata key/value pairs assigned to all + the instances in the corresponding scope. + + This field is a member of `oneof`_ ``_metadata``. + zone (str): + [Output Only] URL of the zone where the resource resides You + must specify this field as part of the HTTP request URL. It + is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_zone``. + """ + + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + metadata: "InstanceSettingsMetadata" = proto.Field( + proto.MESSAGE, + number=86866735, + optional=True, + message="InstanceSettingsMetadata", + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + optional=True, + ) + + +class InstanceSettingsMetadata(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + items (MutableMapping[str, str]): + A metadata key/value items map. The total + size of all keys and values must be less than + 512KB. + kind (str): + [Output Only] Type of the resource. Always compute#metadata + for metadata. + + This field is a member of `oneof`_ ``_kind``. 
+ """ + + items: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=100526016, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + + class InstanceTemplate(proto.Message): r"""Represents an Instance Template resource. Google Compute Engine has two Instance Template resources: \* @@ -45140,15 +46415,17 @@ class InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy(proto.Messag This field is a member of `oneof`_ ``_short_name``. type_ (str): [Output Only] The type of the firewall policy. Can be one of - HIERARCHY, NETWORK, NETWORK_REGIONAL. Check the Type enum - for the list of possible values. + HIERARCHY, NETWORK, NETWORK_REGIONAL, SYSTEM_GLOBAL, + SYSTEM_REGIONAL. Check the Type enum for the list of + possible values. This field is a member of `oneof`_ ``_type``. """ class Type(proto.Enum): r"""[Output Only] The type of the firewall policy. Can be one of - HIERARCHY, NETWORK, NETWORK_REGIONAL. + HIERARCHY, NETWORK, NETWORK_REGIONAL, SYSTEM_GLOBAL, + SYSTEM_REGIONAL. Values: UNDEFINED_TYPE (0): @@ -45610,12 +46887,16 @@ class Status(proto.Enum): READY (77848963): InstantSnapshot has been created successfully. + UNAVAILABLE (413756464): + InstantSnapshot is currently unavailable and + cannot be used for Disk restoration """ UNDEFINED_STATUS = 0 CREATING = 455564985 DELETING = 528602024 FAILED = 455706685 READY = 77848963 + UNAVAILABLE = 413756464 architecture: str = proto.Field( proto.STRING, @@ -49277,16 +50558,16 @@ class InterconnectRemoteLocationConstraints(proto.Message): [Output Only] Port pair remote location constraints, which can take one of the following values: PORT_PAIR_UNCONSTRAINED_REMOTE_LOCATION, - PORT_PAIR_MATCHING_REMOTE_LOCATION. GCP's API refers only to - individual ports, but the UI uses this field when ordering a - pair of ports, to prevent users from accidentally ordering - something that is incompatible with their cloud provider. 
- Specifically, when ordering a redundant pair of Cross-Cloud - Interconnect ports, and one of them uses a remote location - with portPairMatchingRemoteLocation set to matching, the UI - requires that both ports use the same remote location. Check - the PortPairRemoteLocation enum for the list of possible - values. + PORT_PAIR_MATCHING_REMOTE_LOCATION. Google Cloud API refers + only to individual ports, but the UI uses this field when + ordering a pair of ports, to prevent users from accidentally + ordering something that is incompatible with their cloud + provider. Specifically, when ordering a redundant pair of + Cross-Cloud Interconnect ports, and one of them uses a + remote location with portPairMatchingRemoteLocation set to + matching, the UI requires that both ports use the same + remote location. Check the PortPairRemoteLocation enum for + the list of possible values. This field is a member of `oneof`_ ``_port_pair_remote_location``. port_pair_vlan (str): @@ -49313,7 +50594,7 @@ class PortPairRemoteLocation(proto.Enum): r"""[Output Only] Port pair remote location constraints, which can take one of the following values: PORT_PAIR_UNCONSTRAINED_REMOTE_LOCATION, - PORT_PAIR_MATCHING_REMOTE_LOCATION. GCP's API refers only to + PORT_PAIR_MATCHING_REMOTE_LOCATION. Google Cloud API refers only to individual ports, but the UI uses this field when ordering a pair of ports, to prevent users from accidentally ordering something that is incompatible with their cloud provider. Specifically, when ordering @@ -51235,9 +52516,9 @@ class ListDisksRequest(proto.Message): ) -class ListErrorsInstanceGroupManagersRequest(proto.Message): - r"""A request message for InstanceGroupManagers.ListErrors. See - the method description for details. +class ListDisksStoragePoolsRequest(proto.Message): + r"""A request message for StoragePools.ListDisks. See the method + description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -51287,11 +52568,6 @@ class ListErrorsInstanceGroupManagersRequest(proto.Message): multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. - instance_group_manager (str): - The name of the managed instance group. It must be a string - that meets the requirements in RFC1035, or an unsigned long - integer: must match regexp pattern: - (?:`a-z `__?)|1-9{0,19}. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -51332,10 +52608,10 @@ class ListErrorsInstanceGroupManagersRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + storage_pool (str): + Name of the storage pool to list disks of. zone (str): - The name of the zone where the managed - instance group is located. It should conform to - RFC1035. + The name of the zone for this request. """ filter: str = proto.Field( @@ -51343,10 +52619,6 @@ class ListErrorsInstanceGroupManagersRequest(proto.Message): number=336120696, optional=True, ) - instance_group_manager: str = proto.Field( - proto.STRING, - number=249363395, - ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -51371,15 +52643,19 @@ class ListErrorsInstanceGroupManagersRequest(proto.Message): number=517198390, optional=True, ) + storage_pool: str = proto.Field( + proto.STRING, + number=360473440, + ) zone: str = proto.Field( proto.STRING, number=3744684, ) -class ListErrorsRegionInstanceGroupManagersRequest(proto.Message): - r"""A request message for RegionInstanceGroupManagers.ListErrors. - See the method description for details. +class ListErrorsInstanceGroupManagersRequest(proto.Message): + r"""A request message for InstanceGroupManagers.ListErrors. See + the method description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -51464,9 +52740,6 @@ class ListErrorsRegionInstanceGroupManagersRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Name of the region scoping this request. This - should conform to RFC1035. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -51477,6 +52750,10 @@ class ListErrorsRegionInstanceGroupManagersRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone where the managed + instance group is located. It should conform to + RFC1035. """ filter: str = proto.Field( @@ -51507,20 +52784,20 @@ class ListErrorsRegionInstanceGroupManagersRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListExternalVpnGatewaysRequest(proto.Message): - r"""A request message for ExternalVpnGateways.List. See the - method description for details. +class ListErrorsRegionInstanceGroupManagersRequest(proto.Message): + r"""A request message for RegionInstanceGroupManagers.ListErrors. + See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -51570,6 +52847,11 @@ class ListExternalVpnGatewaysRequest(proto.Message): multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. + instance_group_manager (str): + The name of the managed instance group. 
It must be a string + that meets the requirements in RFC1035, or an unsigned long + integer: must match regexp pattern: + (?:`a-z `__?)|1-9{0,19}. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -51600,6 +52882,9 @@ class ListExternalVpnGatewaysRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. + region (str): + Name of the region scoping this request. This + should conform to RFC1035. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -51617,6 +52902,10 @@ class ListExternalVpnGatewaysRequest(proto.Message): number=336120696, optional=True, ) + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -51636,6 +52925,10 @@ class ListExternalVpnGatewaysRequest(proto.Message): proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -51643,9 +52936,9 @@ class ListExternalVpnGatewaysRequest(proto.Message): ) -class ListFirewallPoliciesRequest(proto.Message): - r"""A request message for FirewallPolicies.List. See the method - description for details. +class ListExternalVpnGatewaysRequest(proto.Message): + r"""A request message for ExternalVpnGateways.List. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -51723,13 +53016,8 @@ class ListFirewallPoliciesRequest(proto.Message): the next page of results. This field is a member of `oneof`_ ``_page_token``. - parent_id (str): - Parent ID for this request. 
The ID can be either be - "folders/[FOLDER_ID]" if the parent is a folder or - "organizations/[ORGANIZATION_ID]" if the parent is an - organization. - - This field is a member of `oneof`_ ``_parent_id``. + project (str): + Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -51762,10 +53050,9 @@ class ListFirewallPoliciesRequest(proto.Message): number=19994697, optional=True, ) - parent_id: str = proto.Field( + project: str = proto.Field( proto.STRING, - number=459714768, - optional=True, + number=227560217, ) return_partial_success: bool = proto.Field( proto.BOOL, @@ -51774,8 +53061,8 @@ class ListFirewallPoliciesRequest(proto.Message): ) -class ListFirewallsRequest(proto.Message): - r"""A request message for Firewalls.List. See the method +class ListFirewallPoliciesRequest(proto.Message): + r"""A request message for FirewallPolicies.List. See the method description for details. @@ -51854,8 +53141,13 @@ class ListFirewallsRequest(proto.Message): the next page of results. This field is a member of `oneof`_ ``_page_token``. - project (str): - Project ID for this request. + parent_id (str): + Parent ID for this request. The ID can be either be + "folders/[FOLDER_ID]" if the parent is a folder or + "organizations/[ORGANIZATION_ID]" if the parent is an + organization. + + This field is a member of `oneof`_ ``_parent_id``. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The @@ -51888,9 +53180,10 @@ class ListFirewallsRequest(proto.Message): number=19994697, optional=True, ) - project: str = proto.Field( + parent_id: str = proto.Field( proto.STRING, - number=227560217, + number=459714768, + optional=True, ) return_partial_success: bool = proto.Field( proto.BOOL, @@ -51899,8 +53192,8 @@ class ListFirewallsRequest(proto.Message): ) -class ListForwardingRulesRequest(proto.Message): - r"""A request message for ForwardingRules.List. See the method +class ListFirewallsRequest(proto.Message): + r"""A request message for Firewalls.List. See the method description for details. @@ -51981,8 +53274,6 @@ class ListForwardingRulesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Name of the region scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -52019,10 +53310,6 @@ class ListForwardingRulesRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -52030,8 +53317,8 @@ class ListForwardingRulesRequest(proto.Message): ) -class ListGlobalAddressesRequest(proto.Message): - r"""A request message for GlobalAddresses.List. See the method +class ListForwardingRulesRequest(proto.Message): + r"""A request message for ForwardingRules.List. See the method description for details. @@ -52112,6 +53399,8 @@ class ListGlobalAddressesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. + region (str): + Name of the region scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The @@ -52148,6 +53437,10 @@ class ListGlobalAddressesRequest(proto.Message): proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -52155,9 +53448,9 @@ class ListGlobalAddressesRequest(proto.Message): ) -class ListGlobalForwardingRulesRequest(proto.Message): - r"""A request message for GlobalForwardingRules.List. See the - method description for details. +class ListGlobalAddressesRequest(proto.Message): + r"""A request message for GlobalAddresses.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -52280,9 +53573,9 @@ class ListGlobalForwardingRulesRequest(proto.Message): ) -class ListGlobalNetworkEndpointGroupsRequest(proto.Message): - r"""A request message for GlobalNetworkEndpointGroups.List. See - the method description for details. +class ListGlobalForwardingRulesRequest(proto.Message): + r"""A request message for GlobalForwardingRules.List. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -52405,9 +53698,9 @@ class ListGlobalNetworkEndpointGroupsRequest(proto.Message): ) -class ListGlobalOperationsRequest(proto.Message): - r"""A request message for GlobalOperations.List. See the method - description for details. +class ListGlobalNetworkEndpointGroupsRequest(proto.Message): + r"""A request message for GlobalNetworkEndpointGroups.List. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -52530,9 +53823,9 @@ class ListGlobalOperationsRequest(proto.Message): ) -class ListGlobalOrganizationOperationsRequest(proto.Message): - r"""A request message for GlobalOrganizationOperations.List. See - the method description for details. 
+class ListGlobalOperationsRequest(proto.Message): + r"""A request message for GlobalOperations.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -52610,10 +53903,8 @@ class ListGlobalOrganizationOperationsRequest(proto.Message): the next page of results. This field is a member of `oneof`_ ``_page_token``. - parent_id (str): - Parent ID for this request. - - This field is a member of `oneof`_ ``_parent_id``. + project (str): + Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -52646,10 +53937,9 @@ class ListGlobalOrganizationOperationsRequest(proto.Message): number=19994697, optional=True, ) - parent_id: str = proto.Field( + project: str = proto.Field( proto.STRING, - number=459714768, - optional=True, + number=227560217, ) return_partial_success: bool = proto.Field( proto.BOOL, @@ -52658,8 +53948,8 @@ class ListGlobalOrganizationOperationsRequest(proto.Message): ) -class ListGlobalPublicDelegatedPrefixesRequest(proto.Message): - r"""A request message for GlobalPublicDelegatedPrefixes.List. See +class ListGlobalOrganizationOperationsRequest(proto.Message): + r"""A request message for GlobalOrganizationOperations.List. See the method description for details. @@ -52738,8 +54028,10 @@ class ListGlobalPublicDelegatedPrefixesRequest(proto.Message): the next page of results. This field is a member of `oneof`_ ``_page_token``. - project (str): - Project ID for this request. + parent_id (str): + Parent ID for this request. + + This field is a member of `oneof`_ ``_parent_id``. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The @@ -52772,9 +54064,10 @@ class ListGlobalPublicDelegatedPrefixesRequest(proto.Message): number=19994697, optional=True, ) - project: str = proto.Field( + parent_id: str = proto.Field( proto.STRING, - number=227560217, + number=459714768, + optional=True, ) return_partial_success: bool = proto.Field( proto.BOOL, @@ -52783,9 +54076,9 @@ class ListGlobalPublicDelegatedPrefixesRequest(proto.Message): ) -class ListHealthChecksRequest(proto.Message): - r"""A request message for HealthChecks.List. See the method - description for details. +class ListGlobalPublicDelegatedPrefixesRequest(proto.Message): + r"""A request message for GlobalPublicDelegatedPrefixes.List. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -52908,9 +54201,9 @@ class ListHealthChecksRequest(proto.Message): ) -class ListImagesRequest(proto.Message): - r"""A request message for Images.List. See the method description - for details. +class ListHealthChecksRequest(proto.Message): + r"""A request message for HealthChecks.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -53033,9 +54326,9 @@ class ListImagesRequest(proto.Message): ) -class ListInstanceGroupManagersRequest(proto.Message): - r"""A request message for InstanceGroupManagers.List. See the - method description for details. +class ListImagesRequest(proto.Message): + r"""A request message for Images.List. See the method description + for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -53125,9 +54418,6 @@ class ListInstanceGroupManagersRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. - zone (str): - The name of the zone where the managed - instance group is located. 
""" filter: str = proto.Field( @@ -53159,14 +54449,11 @@ class ListInstanceGroupManagersRequest(proto.Message): number=517198390, optional=True, ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class ListInstanceGroupsRequest(proto.Message): - r"""A request message for InstanceGroups.List. See the method +class ListInstanceGroupManagerResizeRequestsRequest(proto.Message): + r"""A request message for + InstanceGroupManagerResizeRequests.List. See the method description for details. @@ -53217,6 +54504,9 @@ class ListInstanceGroupsRequest(proto.Message): multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. + instance_group_manager (str): + The name of the managed instance group. The + name should conform to RFC1035. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -53258,8 +54548,9 @@ class ListInstanceGroupsRequest(proto.Message): This field is a member of `oneof`_ ``_return_partial_success``. zone (str): - The name of the zone where the instance group - is located. + The name of the zone where the managed + instance group is located. The name should + conform to RFC1035. """ filter: str = proto.Field( @@ -53267,6 +54558,10 @@ class ListInstanceGroupsRequest(proto.Message): number=336120696, optional=True, ) + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -53297,9 +54592,9 @@ class ListInstanceGroupsRequest(proto.Message): ) -class ListInstanceTemplatesRequest(proto.Message): - r"""A request message for InstanceTemplates.List. See the method - description for details. +class ListInstanceGroupManagersRequest(proto.Message): + r"""A request message for InstanceGroupManagers.List. See the + method description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -53389,6 +54684,9 @@ class ListInstanceTemplatesRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone where the managed + instance group is located. """ filter: str = proto.Field( @@ -53420,11 +54718,15 @@ class ListInstanceTemplatesRequest(proto.Message): number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListInstancesInstanceGroupsRequest(proto.Message): - r"""A request message for InstanceGroups.ListInstances. See the - method description for details. +class ListInstanceGroupsRequest(proto.Message): + r"""A request message for InstanceGroups.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -53474,11 +54776,6 @@ class ListInstancesInstanceGroupsRequest(proto.Message): multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. - instance_group (str): - The name of the instance group from which you - want to generate a list of included instances. - instance_groups_list_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupsListInstancesRequest): - The body resource for this request max_results (int): The maximum number of results per page that should be returned. 
If the number of available results is larger than @@ -53529,15 +54826,6 @@ class ListInstancesInstanceGroupsRequest(proto.Message): number=336120696, optional=True, ) - instance_group: str = proto.Field( - proto.STRING, - number=81095253, - ) - instance_groups_list_instances_request_resource: "InstanceGroupsListInstancesRequest" = proto.Field( - proto.MESSAGE, - number=476255263, - message="InstanceGroupsListInstancesRequest", - ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -53568,9 +54856,9 @@ class ListInstancesInstanceGroupsRequest(proto.Message): ) -class ListInstancesRegionInstanceGroupsRequest(proto.Message): - r"""A request message for RegionInstanceGroups.ListInstances. See - the method description for details. +class ListInstanceTemplatesRequest(proto.Message): + r"""A request message for InstanceTemplates.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -53620,9 +54908,6 @@ class ListInstancesRegionInstanceGroupsRequest(proto.Message): multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. - instance_group (str): - Name of the regional instance group for which - we want to list the instances. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -53653,10 +54938,6 @@ class ListInstancesRegionInstanceGroupsRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Name of the region scoping this request. - region_instance_groups_list_instances_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupsListInstancesRequest): - The body resource for this request return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The @@ -53674,10 +54955,6 @@ class ListInstancesRegionInstanceGroupsRequest(proto.Message): number=336120696, optional=True, ) - instance_group: str = proto.Field( - proto.STRING, - number=81095253, - ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -53697,15 +54974,6 @@ class ListInstancesRegionInstanceGroupsRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) - region_instance_groups_list_instances_request_resource: "RegionInstanceGroupsListInstancesRequest" = proto.Field( - proto.MESSAGE, - number=48239828, - message="RegionInstanceGroupsListInstancesRequest", - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -53713,9 +54981,9 @@ class ListInstancesRegionInstanceGroupsRequest(proto.Message): ) -class ListInstancesRequest(proto.Message): - r"""A request message for Instances.List. See the method - description for details. +class ListInstancesInstanceGroupsRequest(proto.Message): + r"""A request message for InstanceGroups.ListInstances. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -53765,6 +55033,11 @@ class ListInstancesRequest(proto.Message): multiple fields using regular expressions. This field is a member of `oneof`_ ``_filter``. + instance_group (str): + The name of the instance group from which you + want to generate a list of included instances. + instance_groups_list_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupsListInstancesRequest): + The body resource for this request max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -53806,7 +55079,8 @@ class ListInstancesRequest(proto.Message): This field is a member of `oneof`_ ``_return_partial_success``. zone (str): - The name of the zone for this request. 
+ The name of the zone where the instance group + is located. """ filter: str = proto.Field( @@ -53814,6 +55088,15 @@ class ListInstancesRequest(proto.Message): number=336120696, optional=True, ) + instance_group: str = proto.Field( + proto.STRING, + number=81095253, + ) + instance_groups_list_instances_request_resource: "InstanceGroupsListInstancesRequest" = proto.Field( + proto.MESSAGE, + number=476255263, + message="InstanceGroupsListInstancesRequest", + ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -53844,8 +55127,284 @@ class ListInstancesRequest(proto.Message): ) -class ListInstantSnapshotsRequest(proto.Message): - r"""A request message for InstantSnapshots.List. See the method +class ListInstancesRegionInstanceGroupsRequest(proto.Message): + r"""A request message for RegionInstanceGroups.ListInstances. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. + + This field is a member of `oneof`_ ``_filter``. + instance_group (str): + Name of the regional instance group for which + we want to list the instances. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. 
+ order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + region_instance_groups_list_instances_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupsListInstancesRequest): + The body resource for this request + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. For example, when + partial success behavior is enabled, + aggregatedList for a single zone scope either + returns all resources in the zone or no + resources, with an error code. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + instance_group: str = proto.Field( + proto.STRING, + number=81095253, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_instance_groups_list_instances_request_resource: "RegionInstanceGroupsListInstancesRequest" = proto.Field( + proto.MESSAGE, + number=48239828, + message="RegionInstanceGroupsListInstancesRequest", + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListInstancesRequest(proto.Message): + r"""A request message for Instances.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. 
The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. 
(Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. For example, when + partial success behavior is enabled, + aggregatedList for a single zone scope either + returns all resources in the zone or no + resources, with an error code. + + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone for this request. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListInstantSnapshotsRequest(proto.Message): + r"""A request message for InstantSnapshots.List. See the method description for details. @@ -62013,8 +63572,8 @@ class ListSslPoliciesRequest(proto.Message): ) -class ListSubnetworksRequest(proto.Message): - r"""A request message for Subnetworks.List. See the method +class ListStoragePoolTypesRequest(proto.Message): + r"""A request message for StoragePoolTypes.List. See the method description for details. @@ -62095,8 +63654,6 @@ class ListSubnetworksRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Name of the region scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -62107,6 +63664,8 @@ class ListSubnetworksRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone for this request. 
""" filter: str = proto.Field( @@ -62133,19 +63692,19 @@ class ListSubnetworksRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListTargetGrpcProxiesRequest(proto.Message): - r"""A request message for TargetGrpcProxies.List. See the method +class ListStoragePoolsRequest(proto.Message): + r"""A request message for StoragePools.List. See the method description for details. @@ -62236,6 +63795,8 @@ class ListTargetGrpcProxiesRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone for this request. """ filter: str = proto.Field( @@ -62267,10 +63828,14 @@ class ListTargetGrpcProxiesRequest(proto.Message): number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListTargetHttpProxiesRequest(proto.Message): - r"""A request message for TargetHttpProxies.List. See the method +class ListSubnetworksRequest(proto.Message): + r"""A request message for Subnetworks.List. See the method description for details. @@ -62351,6 +63916,8 @@ class ListTargetHttpProxiesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. + region (str): + Name of the region scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The @@ -62387,6 +63954,10 @@ class ListTargetHttpProxiesRequest(proto.Message): proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -62394,8 +63965,8 @@ class ListTargetHttpProxiesRequest(proto.Message): ) -class ListTargetHttpsProxiesRequest(proto.Message): - r"""A request message for TargetHttpsProxies.List. See the method +class ListTargetGrpcProxiesRequest(proto.Message): + r"""A request message for TargetGrpcProxies.List. See the method description for details. @@ -62519,8 +64090,8 @@ class ListTargetHttpsProxiesRequest(proto.Message): ) -class ListTargetInstancesRequest(proto.Message): - r"""A request message for TargetInstances.List. See the method +class ListTargetHttpProxiesRequest(proto.Message): + r"""A request message for TargetHttpProxies.List. See the method description for details. @@ -62611,8 +64182,6 @@ class ListTargetInstancesRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. - zone (str): - Name of the zone scoping this request. """ filter: str = proto.Field( @@ -62644,14 +64213,10 @@ class ListTargetInstancesRequest(proto.Message): number=517198390, optional=True, ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class ListTargetPoolsRequest(proto.Message): - r"""A request message for TargetPools.List. See the method +class ListTargetHttpsProxiesRequest(proto.Message): + r"""A request message for TargetHttpsProxies.List. See the method description for details. @@ -62732,8 +64297,6 @@ class ListTargetPoolsRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Name of the region scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The @@ -62770,10 +64333,6 @@ class ListTargetPoolsRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -62781,8 +64340,8 @@ class ListTargetPoolsRequest(proto.Message): ) -class ListTargetSslProxiesRequest(proto.Message): - r"""A request message for TargetSslProxies.List. See the method +class ListTargetInstancesRequest(proto.Message): + r"""A request message for TargetInstances.List. See the method description for details. @@ -62873,6 +64432,8 @@ class ListTargetSslProxiesRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + Name of the zone scoping this request. """ filter: str = proto.Field( @@ -62904,10 +64465,14 @@ class ListTargetSslProxiesRequest(proto.Message): number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListTargetTcpProxiesRequest(proto.Message): - r"""A request message for TargetTcpProxies.List. See the method +class ListTargetPoolsRequest(proto.Message): + r"""A request message for TargetPools.List. See the method description for details. @@ -62988,6 +64553,8 @@ class ListTargetTcpProxiesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. + region (str): + Name of the region scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The @@ -63024,6 +64591,10 @@ class ListTargetTcpProxiesRequest(proto.Message): proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -63031,8 +64602,8 @@ class ListTargetTcpProxiesRequest(proto.Message): ) -class ListTargetVpnGatewaysRequest(proto.Message): - r"""A request message for TargetVpnGateways.List. See the method +class ListTargetSslProxiesRequest(proto.Message): + r"""A request message for TargetSslProxies.List. See the method description for details. @@ -63113,8 +64684,6 @@ class ListTargetVpnGatewaysRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Name of the region for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -63151,10 +64720,6 @@ class ListTargetVpnGatewaysRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -63162,8 +64727,8 @@ class ListTargetVpnGatewaysRequest(proto.Message): ) -class ListUrlMapsRequest(proto.Message): - r"""A request message for UrlMaps.List. See the method +class ListTargetTcpProxiesRequest(proto.Message): + r"""A request message for TargetTcpProxies.List. See the method description for details. @@ -63287,9 +64852,9 @@ class ListUrlMapsRequest(proto.Message): ) -class ListUsableBackendServicesRequest(proto.Message): - r"""A request message for BackendServices.ListUsable. See the - method description for details. +class ListTargetVpnGatewaysRequest(proto.Message): + r"""A request message for TargetVpnGateways.List. See the method + description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -63369,6 +64934,8 @@ class ListUsableBackendServicesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. + region (str): + Name of the region for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -63405,6 +64972,10 @@ class ListUsableBackendServicesRequest(proto.Message): proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -63412,9 +64983,9 @@ class ListUsableBackendServicesRequest(proto.Message): ) -class ListUsableRegionBackendServicesRequest(proto.Message): - r"""A request message for RegionBackendServices.ListUsable. See - the method description for details. +class ListUrlMapsRequest(proto.Message): + r"""A request message for UrlMaps.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -63494,10 +65065,6 @@ class ListUsableRegionBackendServicesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Name of the region scoping this request. It - must be a string that meets the requirements in - RFC1035. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The @@ -63534,10 +65101,6 @@ class ListUsableRegionBackendServicesRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -63545,9 +65108,9 @@ class ListUsableRegionBackendServicesRequest(proto.Message): ) -class ListUsableSubnetworksRequest(proto.Message): - r"""A request message for Subnetworks.ListUsable. See the method - description for details. +class ListUsableBackendServicesRequest(proto.Message): + r"""A request message for BackendServices.ListUsable. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -63670,9 +65233,267 @@ class ListUsableSubnetworksRequest(proto.Message): ) -class ListVpnGatewaysRequest(proto.Message): - r"""A request message for VpnGateways.List. See the method - description for details. +class ListUsableRegionBackendServicesRequest(proto.Message): + r"""A request message for RegionBackendServices.ListUsable. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. 
+ For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. 
If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. It + must be a string that meets the requirements in + RFC1035. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. For example, when + partial success behavior is enabled, + aggregatedList for a single zone scope either + returns all resources in the zone or no + resources, with an error code. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListUsableSubnetworksRequest(proto.Message): + r"""A request message for Subnetworks.ListUsable. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. If you want to use AIP-160, your + expression must specify the field name, an operator, and the + value that you want to use for filtering. The value must be + a string, a number, or a boolean. The operator must be + either ``=``, ``!=``, ``>``, ``<``, ``<=``, ``>=`` or ``:``. + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. The ``:*`` + comparison can be used to test whether a key has been + defined. For example, to find all objects with ``owner`` + label use: ``labels.owner:*`` You can also filter nested + fields. 
For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` The + literal value is interpreted as a regular expression using + Google RE2 library syntax. The literal value must match the + entire field. For example, to filter for instances that do + not end with name "instance", you would use + ``name ne .*instance``. You cannot combine constraints on + multiple fields using regular expressions. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. 
By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. For example, when + partial success behavior is enabled, + aggregatedList for a single zone scope either + returns all resources in the zone or no + resources, with an error code. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListVpnGatewaysRequest(proto.Message): + r"""A request message for VpnGateways.List. See the method + description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -67517,9 +69338,9 @@ class NetworkEndpointGroup(proto.Message): Type of network endpoints in this network endpoint group. Can be one of GCE_VM_IP, GCE_VM_IP_PORT, NON_GCP_PRIVATE_IP_PORT, INTERNET_FQDN_PORT, - INTERNET_IP_PORT, SERVERLESS, PRIVATE_SERVICE_CONNECT. Check - the NetworkEndpointType enum for the list of possible - values. + INTERNET_IP_PORT, SERVERLESS, PRIVATE_SERVICE_CONNECT, + GCE_VM_IP_PORTMAP. Check the NetworkEndpointType enum for + the list of possible values. This field is a member of `oneof`_ ``_network_endpoint_type``. psc_data (google.cloud.compute_v1.types.NetworkEndpointGroupPscData): @@ -67562,7 +69383,7 @@ class NetworkEndpointType(proto.Enum): r"""Type of network endpoints in this network endpoint group. Can be one of GCE_VM_IP, GCE_VM_IP_PORT, NON_GCP_PRIVATE_IP_PORT, INTERNET_FQDN_PORT, INTERNET_IP_PORT, SERVERLESS, - PRIVATE_SERVICE_CONNECT. + PRIVATE_SERVICE_CONNECT, GCE_VM_IP_PORTMAP. Values: UNDEFINED_NETWORK_ENDPOINT_TYPE (0): @@ -69207,6 +71028,13 @@ class NodeGroup(proto.Message): NodeTemplate. This field is a member of `oneof`_ ``_location_hint``. + maintenance_interval (str): + Specifies the frequency of planned maintenance events. The + accepted values are: ``AS_NEEDED`` and ``RECURRENT``. Check + the MaintenanceInterval enum for the list of possible + values. + + This field is a member of `oneof`_ ``_maintenance_interval``. maintenance_policy (str): Specifies how to handle instances when a node in the group undergoes maintenance. Set to one of: DEFAULT, @@ -69259,6 +71087,34 @@ class NodeGroup(proto.Message): This field is a member of `oneof`_ ``_zone``. """ + class MaintenanceInterval(proto.Enum): + r"""Specifies the frequency of planned maintenance events. The accepted + values are: ``AS_NEEDED`` and ``RECURRENT``. 
+ + Values: + UNDEFINED_MAINTENANCE_INTERVAL (0): + A value indicating that the enum field is not + set. + AS_NEEDED (500724834): + VMs are eligible to receive infrastructure + and hypervisor updates as they become available. + This may result in more maintenance operations + (live migrations or terminations) for the VM + than the PERIODIC and RECURRENT options. + RECURRENT (194244550): + VMs receive infrastructure and hypervisor updates on a + periodic basis, minimizing the number of maintenance + operations (live migrations or terminations) on an + individual VM. This may mean a VM will take longer to + receive an update than if it was configured for AS_NEEDED. + Security updates will still be applied as soon as they are + available. RECURRENT is used for GEN3 and Slice of Hardware + VMs. + """ + UNDEFINED_MAINTENANCE_INTERVAL = 0 + AS_NEEDED = 500724834 + RECURRENT = 194244550 + class MaintenancePolicy(proto.Enum): r"""Specifies how to handle instances when a node in the group undergoes maintenance. Set to one of: DEFAULT, RESTART_IN_PLACE, or @@ -69354,6 +71210,11 @@ class Status(proto.Enum): number=350519505, optional=True, ) + maintenance_interval: str = proto.Field( + proto.STRING, + number=403368049, + optional=True, + ) maintenance_policy: str = proto.Field( proto.STRING, number=528327646, @@ -69717,6 +71578,11 @@ class NodeGroupNode(proto.Message): node. This field is a member of `oneof`_ ``_total_resources``. + upcoming_maintenance (google.cloud.compute_v1.types.UpcomingMaintenance): + [Output Only] The information about an upcoming maintenance + event. + + This field is a member of `oneof`_ ``_upcoming_maintenance``. 
""" class CpuOvercommitType(proto.Enum): @@ -69832,6 +71698,12 @@ class Status(proto.Enum): optional=True, message="InstanceConsumptionInfo", ) + upcoming_maintenance: "UpcomingMaintenance" = proto.Field( + proto.MESSAGE, + number=227348592, + optional=True, + message="UpcomingMaintenance", + ) class NodeGroupsAddNodesRequest(proto.Message): @@ -69943,6 +71815,32 @@ def raw_page(self): ) +class NodeGroupsPerformMaintenanceRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + nodes (MutableSequence[str]): + [Required] List of nodes affected by the call. + start_time (str): + The start time of the schedule. The timestamp + is an RFC3339 string. + + This field is a member of `oneof`_ ``_start_time``. + """ + + nodes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=104993457, + ) + start_time: str = proto.Field( + proto.STRING, + number=37467274, + optional=True, + ) + + class NodeGroupsScopedList(proto.Message): r""" @@ -71852,7 +73750,7 @@ class PacketMirroring(proto.Message): This field is a member of `oneof`_ ``_enable``. filter (google.cloud.compute_v1.types.PacketMirroringFilter): Filter for mirrored traffic. If unspecified, - all traffic is mirrored. + all IPv4 traffic is mirrored. This field is a member of `oneof`_ ``_filter``. id (int): @@ -72097,15 +73995,14 @@ class PacketMirroringFilter(proto.Message): mirrored. cidr_ranges (MutableSequence[str]): One or more IPv4 or IPv6 CIDR ranges that - apply as filter on the source (ingress) or + apply as filters on the source (ingress) or destination (egress) IP in the IP header. If no ranges are specified, all IPv4 traffic that matches the specified IPProtocols is mirrored. If neither cidrRanges nor IPProtocols is specified, all IPv4 traffic is mirrored. To mirror all IPv4 and IPv6 traffic, use - "0.0.0.0/0,::/0". Note: Support for IPv6 traffic - is in preview. + "0.0.0.0/0,::/0". 
direction (str): Direction of traffic to mirror, either INGRESS, EGRESS, or BOTH. The default is BOTH. @@ -73030,22 +74927,18 @@ class PatchInstanceGroupManagerRequest(proto.Message): ) -class PatchInterconnectAttachmentRequest(proto.Message): - r"""A request message for InterconnectAttachments.Patch. See the +class PatchInstanceSettingRequest(proto.Message): + r"""A request message for InstanceSettingsService.Patch. See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - interconnect_attachment (str): - Name of the interconnect attachment to patch. - interconnect_attachment_resource (google.cloud.compute_v1.types.InterconnectAttachment): + instance_settings_resource (google.cloud.compute_v1.types.InstanceSettings): The body resource for this request project (str): Project ID for this request. - region (str): - Name of the region scoping this request. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -73064,74 +74957,20 @@ class PatchInterconnectAttachmentRequest(proto.Message): 00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - """ - - interconnect_attachment: str = proto.Field( - proto.STRING, - number=308135284, - ) - interconnect_attachment_resource: "InterconnectAttachment" = proto.Field( - proto.MESSAGE, - number=212341369, - message="InterconnectAttachment", - ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) - request_id: str = proto.Field( - proto.STRING, - number=37109963, - optional=True, - ) - - -class PatchInterconnectRequest(proto.Message): - r"""A request message for Interconnects.Patch. See the method - description for details. - - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - interconnect (str): - Name of the interconnect to update. - interconnect_resource (google.cloud.compute_v1.types.Interconnect): - The body resource for this request - project (str): - Project ID for this request. - request_id (str): - An optional request ID to identify requests. - Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. For example, consider a situation - where you make an initial request and the - request times out. If you make the request again - with the same request ID, the server can check - if original operation with the same request ID - was received, and if so, will ignore the second - request. This prevents clients from accidentally - creating duplicate commitments. The request ID - must be a valid UUID with the exception that - zero UUID is not supported ( - 00000000-0000-0000-0000-000000000000). + update_mask (str): + update_mask indicates fields to be updated as part of this + request. - This field is a member of `oneof`_ ``_request_id``. + This field is a member of `oneof`_ ``_update_mask``. + zone (str): + The zone scoping this request. It should + conform to RFC1035. """ - interconnect: str = proto.Field( - proto.STRING, - number=224601230, - ) - interconnect_resource: "Interconnect" = proto.Field( + instance_settings_resource: "InstanceSettings" = proto.Field( proto.MESSAGE, - number=397611167, - message="Interconnect", + number=290689920, + message="InstanceSettings", ) project: str = proto.Field( proto.STRING, @@ -73142,83 +74981,29 @@ class PatchInterconnectRequest(proto.Message): number=37109963, optional=True, ) - - -class PatchNetworkAttachmentRequest(proto.Message): - r"""A request message for NetworkAttachments.Patch. See the - method description for details. - - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - network_attachment (str): - Name of the NetworkAttachment resource to - patch. - network_attachment_resource (google.cloud.compute_v1.types.NetworkAttachment): - The body resource for this request - project (str): - Project ID for this request. - region (str): - Name of the region for this request. - request_id (str): - An optional request ID to identify requests. Specify a - unique request ID so that if you must retry your request, - the server will know to ignore the request if it has already - been completed. For example, consider a situation where you - make an initial request and the request times out. If you - make the request again with the same request ID, the server - can check if original operation with the same request ID was - received, and if so, will ignore the second request. This - prevents clients from accidentally creating duplicate - commitments. The request ID must be a valid UUID with the - exception that zero UUID is not supported ( - 00000000-0000-0000-0000-000000000000). end_interface: - MixerMutationRequestBuilder - - This field is a member of `oneof`_ ``_request_id``. - """ - - network_attachment: str = proto.Field( - proto.STRING, - number=224644052, - ) - network_attachment_resource: "NetworkAttachment" = proto.Field( - proto.MESSAGE, - number=210974745, - message="NetworkAttachment", - ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - region: str = proto.Field( + update_mask: str = proto.Field( proto.STRING, - number=138946292, + number=500079778, + optional=True, ) - request_id: str = proto.Field( + zone: str = proto.Field( proto.STRING, - number=37109963, - optional=True, + number=3744684, ) -class PatchNetworkEdgeSecurityServiceRequest(proto.Message): - r"""A request message for NetworkEdgeSecurityServices.Patch. See - the method description for details. 
+class PatchInterconnectAttachmentRequest(proto.Message): + r"""A request message for InterconnectAttachments.Patch. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - network_edge_security_service (str): - Name of the network edge security service to - update. - network_edge_security_service_resource (google.cloud.compute_v1.types.NetworkEdgeSecurityService): + interconnect_attachment (str): + Name of the interconnect attachment to patch. + interconnect_attachment_resource (google.cloud.compute_v1.types.InterconnectAttachment): The body resource for this request - paths (str): - - This field is a member of `oneof`_ ``_paths``. project (str): Project ID for this request. region (str): @@ -73241,26 +75026,16 @@ class PatchNetworkEdgeSecurityServiceRequest(proto.Message): 00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - update_mask (str): - Indicates fields to be updated as part of - this request. - - This field is a member of `oneof`_ ``_update_mask``. 
""" - network_edge_security_service: str = proto.Field( + interconnect_attachment: str = proto.Field( proto.STRING, - number=157011879, + number=308135284, ) - network_edge_security_service_resource: "NetworkEdgeSecurityService" = proto.Field( + interconnect_attachment_resource: "InterconnectAttachment" = proto.Field( proto.MESSAGE, - number=477548966, - message="NetworkEdgeSecurityService", - ) - paths: str = proto.Field( - proto.STRING, - number=106438894, - optional=True, + number=212341369, + message="InterconnectAttachment", ) project: str = proto.Field( proto.STRING, @@ -73275,78 +75050,265 @@ class PatchNetworkEdgeSecurityServiceRequest(proto.Message): number=37109963, optional=True, ) - update_mask: str = proto.Field( - proto.STRING, - number=500079778, - optional=True, - ) - - -class PatchNetworkFirewallPolicyRequest(proto.Message): - r"""A request message for NetworkFirewallPolicies.Patch. See the - method description for details. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - firewall_policy (str): - Name of the firewall policy to update. - firewall_policy_resource (google.cloud.compute_v1.types.FirewallPolicy): - The body resource for this request - project (str): - Project ID for this request. - request_id (str): - An optional request ID to identify requests. - Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. For example, consider a situation - where you make an initial request and the - request times out. If you make the request again - with the same request ID, the server can check - if original operation with the same request ID - was received, and if so, will ignore the second - request. This prevents clients from accidentally - creating duplicate commitments. 
The request ID - must be a valid UUID with the exception that - zero UUID is not supported ( - 00000000-0000-0000-0000-000000000000). - - This field is a member of `oneof`_ ``_request_id``. - """ - - firewall_policy: str = proto.Field( - proto.STRING, - number=498173265, - ) - firewall_policy_resource: "FirewallPolicy" = proto.Field( - proto.MESSAGE, - number=495049532, - message="FirewallPolicy", - ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - request_id: str = proto.Field( - proto.STRING, - number=37109963, - optional=True, - ) -class PatchNetworkRequest(proto.Message): - r"""A request message for Networks.Patch. See the method +class PatchInterconnectRequest(proto.Message): + r"""A request message for Interconnects.Patch. See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - network (str): - Name of the network to update. - network_resource (google.cloud.compute_v1.types.Network): + interconnect (str): + Name of the interconnect to update. + interconnect_resource (google.cloud.compute_v1.types.Interconnect): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). 
+ + This field is a member of `oneof`_ ``_request_id``. + """ + + interconnect: str = proto.Field( + proto.STRING, + number=224601230, + ) + interconnect_resource: "Interconnect" = proto.Field( + proto.MESSAGE, + number=397611167, + message="Interconnect", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchNetworkAttachmentRequest(proto.Message): + r"""A request message for NetworkAttachments.Patch. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_attachment (str): + Name of the NetworkAttachment resource to + patch. + network_attachment_resource (google.cloud.compute_v1.types.NetworkAttachment): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. Specify a + unique request ID so that if you must retry your request, + the server will know to ignore the request if it has already + been completed. For example, consider a situation where you + make an initial request and the request times out. If you + make the request again with the same request ID, the server + can check if original operation with the same request ID was + received, and if so, will ignore the second request. This + prevents clients from accidentally creating duplicate + commitments. The request ID must be a valid UUID with the + exception that zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). end_interface: + MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + network_attachment: str = proto.Field( + proto.STRING, + number=224644052, + ) + network_attachment_resource: "NetworkAttachment" = proto.Field( + proto.MESSAGE, + number=210974745, + message="NetworkAttachment", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchNetworkEdgeSecurityServiceRequest(proto.Message): + r"""A request message for NetworkEdgeSecurityServices.Patch. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network_edge_security_service (str): + Name of the network edge security service to + update. + network_edge_security_service_resource (google.cloud.compute_v1.types.NetworkEdgeSecurityService): + The body resource for this request + paths (str): + + This field is a member of `oneof`_ ``_paths``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ update_mask (str): + Indicates fields to be updated as part of + this request. + + This field is a member of `oneof`_ ``_update_mask``. + """ + + network_edge_security_service: str = proto.Field( + proto.STRING, + number=157011879, + ) + network_edge_security_service_resource: "NetworkEdgeSecurityService" = proto.Field( + proto.MESSAGE, + number=477548966, + message="NetworkEdgeSecurityService", + ) + paths: str = proto.Field( + proto.STRING, + number=106438894, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + update_mask: str = proto.Field( + proto.STRING, + number=500079778, + optional=True, + ) + + +class PatchNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for NetworkFirewallPolicies.Patch. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + firewall_policy (str): + Name of the firewall policy to update. + firewall_policy_resource (google.cloud.compute_v1.types.FirewallPolicy): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. 
The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) + firewall_policy_resource: "FirewallPolicy" = proto.Field( + proto.MESSAGE, + number=495049532, + message="FirewallPolicy", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchNetworkRequest(proto.Message): + r"""A request message for Networks.Patch. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + network (str): + Name of the network to update. + network_resource (google.cloud.compute_v1.types.Network): The body resource for this request project (str): Project ID for this request. @@ -75361,11 +77323,13 @@ class PathMatcher(proto.Message): weightedBackendServices, defaultService must not be set. Conversely if defaultService is set, defaultRouteAction cannot contain any - weightedBackendServices. Only one of - defaultRouteAction or defaultUrlRedirect must be - set. URL maps for classic Application Load - Balancers only support the urlRewrite action - within a path matcher's defaultRouteAction. + weightedBackendServices. If defaultRouteAction + is specified, don't set defaultUrlRedirect. If + defaultRouteAction.weightedBackendServices is + specified, don't set defaultService. URL maps + for classic Application Load Balancers only + support the urlRewrite action within a path + matcher's defaultRouteAction. This field is a member of `oneof`_ ``_default_route_action``. default_service (str): @@ -75383,9 +77347,9 @@ class PathMatcher(proto.Message): specified, defaultRouteAction cannot contain any weightedBackendServices. 
Conversely, if defaultRouteAction specifies any weightedBackendServices, defaultService must - not be specified. Only one of defaultService, - defaultUrlRedirect , or - defaultRouteAction.weightedBackendService must be set. + not be specified. If defaultService is specified, then set + either defaultUrlRedirect or + defaultRouteAction.weightedBackendService. Don't set both. Authorization requires one or more of the following Google IAM permissions on the specified resource default_service: - compute.backendBuckets.use - compute.backendServices.use @@ -75395,10 +77359,10 @@ class PathMatcher(proto.Message): When none of the specified pathRules or routeRules match, the request is redirected to a URL specified by defaultUrlRedirect. If - defaultUrlRedirect is specified, defaultService - or defaultRouteAction must not be set. Not - supported when the URL map is bound to a target - gRPC proxy. + defaultUrlRedirect is specified, then set either + defaultService or defaultRouteAction. Don't set + both. Not supported when the URL map is bound to + a target gRPC proxy. This field is a member of `oneof`_ ``_default_url_redirect``. description (str): @@ -75728,6 +77692,66 @@ class PerformMaintenanceInstanceRequest(proto.Message): ) +class PerformMaintenanceNodeGroupRequest(proto.Message): + r"""A request message for NodeGroups.PerformMaintenance. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + node_group (str): + Name of the node group scoping this request. + node_groups_perform_maintenance_request_resource (google.cloud.compute_v1.types.NodeGroupsPerformMaintenanceRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + node_group: str = proto.Field( + proto.STRING, + number=469958146, + ) + node_groups_perform_maintenance_request_resource: "NodeGroupsPerformMaintenanceRequest" = proto.Field( + proto.MESSAGE, + number=185310294, + message="NodeGroupsPerformMaintenanceRequest", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + class Policy(proto.Message): r"""An Identity and Access Management (IAM) policy, which specifies access controls for Google Cloud resources. A ``Policy`` is a @@ -77014,6 +79038,14 @@ class PublicDelegatedPrefix(proto.Message): .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + allocatable_prefix_length (int): + The allocatable prefix length supported by + this public delegated prefix. This field is + optional and cannot be set for prefixes in + DELEGATION mode. It cannot be set for IPv4 + prefixes either, and it always defaults to 32. + + This field is a member of `oneof`_ ``_allocatable_prefix_length``. 
byoip_api_version (str): [Output Only] The version of BYOIP API. Check the ByoipApiVersion enum for the list of possible values. @@ -77061,6 +79093,12 @@ class PublicDelegatedPrefix(proto.Message): compute#publicDelegatedPrefix for public delegated prefixes. This field is a member of `oneof`_ ``_kind``. + mode (str): + The public delegated prefix mode for IPv6 + only. Check the Mode enum for the list of + possible values. + + This field is a member of `oneof`_ ``_mode``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -77127,6 +79165,26 @@ class ByoipApiVersion(proto.Enum): V1 = 2715 V2 = 2716 + class Mode(proto.Enum): + r"""The public delegated prefix mode for IPv6 only. + + Values: + UNDEFINED_MODE (0): + A value indicating that the enum field is not + set. + DELEGATION (264149288): + The public delegated prefix is used for + further sub-delegation only. Such prefixes + cannot set allocatablePrefixLength. + EXTERNAL_IPV6_FORWARDING_RULE_CREATION (398684356): + The public delegated prefix is used for + creating forwarding rules only. Such prefixes + cannot set publicDelegatedSubPrefixes. 
+ """ + UNDEFINED_MODE = 0 + DELEGATION = 264149288 + EXTERNAL_IPV6_FORWARDING_RULE_CREATION = 398684356 + class Status(proto.Enum): r"""[Output Only] The status of the public delegated prefix, which can be one of following values: - ``INITIALIZING`` The public delegated @@ -77166,6 +79224,11 @@ class Status(proto.Enum): INITIALIZING = 306588749 READY_TO_ANNOUNCE = 64641265 + allocatable_prefix_length: int = proto.Field( + proto.INT32, + number=38427446, + optional=True, + ) byoip_api_version: str = proto.Field( proto.STRING, number=162683283, @@ -77206,6 +79269,11 @@ class Status(proto.Enum): number=3292052, optional=True, ) + mode: str = proto.Field( + proto.STRING, + number=3357091, + optional=True, + ) name: str = proto.Field( proto.STRING, number=3373707, @@ -77404,6 +79472,11 @@ class PublicDelegatedPrefixPublicDelegatedSubPrefix(proto.Message): .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + allocatable_prefix_length (int): + The allocatable prefix length supported by + this PublicDelegatedSubPrefix. + + This field is a member of `oneof`_ ``_allocatable_prefix_length``. delegatee_project (str): Name of the project scoping this PublicDelegatedSubPrefix. @@ -77425,6 +79498,12 @@ class PublicDelegatedPrefixPublicDelegatedSubPrefix(proto.Message): Address resources in the delegatee project. This field is a member of `oneof`_ ``_is_address``. + mode (str): + The PublicDelegatedSubPrefix mode for IPv6 + only. Check the Mode enum for the list of + possible values. + + This field is a member of `oneof`_ ``_mode``. name (str): The name of the sub public delegated prefix. @@ -77441,6 +79520,26 @@ class PublicDelegatedPrefixPublicDelegatedSubPrefix(proto.Message): This field is a member of `oneof`_ ``_status``. """ + class Mode(proto.Enum): + r"""The PublicDelegatedSubPrefix mode for IPv6 only. + + Values: + UNDEFINED_MODE (0): + A value indicating that the enum field is not + set. 
+ DELEGATION (264149288): + The public delegated prefix is used for + further sub-delegation only. Such prefixes + cannot set allocatablePrefixLength. + EXTERNAL_IPV6_FORWARDING_RULE_CREATION (398684356): + The public delegated prefix is used for + creating forwarding rules only. Such prefixes + cannot set publicDelegatedSubPrefixes. + """ + UNDEFINED_MODE = 0 + DELEGATION = 264149288 + EXTERNAL_IPV6_FORWARDING_RULE_CREATION = 398684356 + class Status(proto.Enum): r"""[Output Only] The status of the sub public delegated prefix. @@ -77457,6 +79556,11 @@ class Status(proto.Enum): ACTIVE = 314733318 INACTIVE = 270421099 + allocatable_prefix_length: int = proto.Field( + proto.INT32, + number=38427446, + optional=True, + ) delegatee_project: str = proto.Field( proto.STRING, number=414860634, @@ -77477,6 +79581,11 @@ class Status(proto.Enum): number=352617951, optional=True, ) + mode: str = proto.Field( + proto.STRING, + number=3357091, + optional=True, + ) name: str = proto.Field( proto.STRING, number=3373707, @@ -77660,6 +79769,12 @@ class Metric(proto.Enum): No description available. GPUS_ALL_REGIONS (39387177): No description available. + HDB_TOTAL_GB (319316271): + No description available. + HDB_TOTAL_IOPS (309720317): + No description available. + HDB_TOTAL_THROUGHPUT (20981374): + No description available. HEALTH_CHECKS (289347502): No description available. IMAGES (15562360): @@ -77806,6 +79921,8 @@ class Metric(proto.Enum): No description available. REGIONAL_INTERNAL_MANAGED_BACKEND_SERVICES (96282539): No description available. + REGIONAL_INTERNAL_TRAFFIC_DIRECTOR_BACKEND_SERVICES (483162968): + No description available. RESERVATIONS (32644647): No description available. RESOURCE_POLICIES (83955297): @@ -77835,6 +79952,8 @@ class Metric(proto.Enum): No description available. SSL_CERTIFICATES (378372399): No description available. + SSL_POLICIES (523254339): + No description available. STATIC_ADDRESSES (93624049): No description available. 
STATIC_BYOIP_ADDRESSES (275809649): @@ -77869,6 +79988,8 @@ class Metric(proto.Enum): No description available. URL_MAPS (378660743): No description available. + VARIABLE_IPV6_PUBLIC_DELEGATED_PREFIXES (128400161): + No description available. VPN_GATEWAYS (35620282): No description available. VPN_TUNNELS (104327296): @@ -77928,6 +80049,9 @@ class Metric(proto.Enum): GLOBAL_INTERNAL_MANAGED_BACKEND_SERVICES = 256608303 GLOBAL_INTERNAL_TRAFFIC_DIRECTOR_BACKEND_SERVICES = 323514196 GPUS_ALL_REGIONS = 39387177 + HDB_TOTAL_GB = 319316271 + HDB_TOTAL_IOPS = 309720317 + HDB_TOTAL_THROUGHPUT = 20981374 HEALTH_CHECKS = 289347502 IMAGES = 15562360 INSTANCES = 131337822 @@ -78001,6 +80125,7 @@ class Metric(proto.Enum): REGIONAL_INSTANCE_GROUP_MANAGERS = 37543696 REGIONAL_INTERNAL_LB_BACKEND_SERVICES = 137983760 REGIONAL_INTERNAL_MANAGED_BACKEND_SERVICES = 96282539 + REGIONAL_INTERNAL_TRAFFIC_DIRECTOR_BACKEND_SERVICES = 483162968 RESERVATIONS = 32644647 RESOURCE_POLICIES = 83955297 ROUTERS = 493018666 @@ -78015,6 +80140,7 @@ class Metric(proto.Enum): SNAPSHOTS = 343405327 SSD_TOTAL_GB = 161732561 SSL_CERTIFICATES = 378372399 + SSL_POLICIES = 523254339 STATIC_ADDRESSES = 93624049 STATIC_BYOIP_ADDRESSES = 275809649 STATIC_EXTERNAL_IPV6_ADDRESS_RANGES = 472346774 @@ -78032,6 +80158,7 @@ class Metric(proto.Enum): TPU_LITE_PODSLICE_V5 = 12708294 TPU_PODSLICE_V4 = 214467530 URL_MAPS = 378660743 + VARIABLE_IPV6_PUBLIC_DELEGATED_PREFIXES = 128400161 VPN_GATEWAYS = 35620282 VPN_TUNNELS = 104327296 XPN_SERVICE_PROJECTS = 95191981 @@ -78145,6 +80272,177 @@ class RolloutStatus(proto.Enum): ) +class QuotaStatusWarning(proto.Message): + r"""[Output Only] Warning of fetching the ``quotas`` field for this + region. This field is populated only if fetching of the ``quotas`` + field fails. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + code (str): + [Output Only] A warning code, if applicable. 
For example, + Compute Engine returns NO_RESULTS_ON_PAGE if there are no + results in the response. Check the Code enum for the list of + possible values. + + This field is a member of `oneof`_ ``_code``. + data (MutableSequence[google.cloud.compute_v1.types.Data]): + [Output Only] Metadata about this warning in key: value + format. For example: "data": [ { "key": "scope", "value": + "zones/us-east1-d" } + message (str): + [Output Only] A human-readable description of the warning + code. + + This field is a member of `oneof`_ ``_message``. + """ + + class Code(proto.Enum): + r"""[Output Only] A warning code, if applicable. For example, Compute + Engine returns NO_RESULTS_ON_PAGE if there are no results in the + response. + + Values: + UNDEFINED_CODE (0): + A value indicating that the enum field is not + set. + CLEANUP_FAILED (150308440): + Warning about failed cleanup of transient + changes made by a failed operation. + DEPRECATED_RESOURCE_USED (391835586): + A link to a deprecated resource was created. + DEPRECATED_TYPE_USED (346526230): + When deploying and at least one of the + resources has a type marked as deprecated + DISK_SIZE_LARGER_THAN_IMAGE_SIZE (369442967): + The user created a boot disk that is larger + than image size. + EXPERIMENTAL_TYPE_USED (451954443): + When deploying and at least one of the + resources has a type marked as experimental + EXTERNAL_API_WARNING (175546307): + Warning that is present in an external api + call + FIELD_VALUE_OVERRIDEN (329669423): + Warning that value of a field has been + overridden. Deprecated unused field. + INJECTED_KERNELS_DEPRECATED (417377419): + The operation involved use of an injected + kernel, which is deprecated. + INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB (401542606): + A WEIGHTED_MAGLEV backend service is associated with a + health check that is not of type HTTP/HTTPS/HTTP2. 
+ LARGE_DEPLOYMENT_WARNING (481440678): + When deploying a deployment with a + exceedingly large number of resources + LIST_OVERHEAD_QUOTA_EXCEED (47618117): + Resource can't be retrieved due to list + overhead quota exceed which captures the amount + of resources filtered out by user-defined list + filter. + MISSING_TYPE_DEPENDENCY (344505463): + A resource depends on a missing type + NEXT_HOP_ADDRESS_NOT_ASSIGNED (324964999): + The route's nextHopIp address is not assigned + to an instance on the network. + NEXT_HOP_CANNOT_IP_FORWARD (383382887): + The route's next hop instance cannot ip + forward. + NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE (146748434): + The route's nextHopInstance URL refers to an + instance that does not have an ipv6 interface on + the same network as the route. + NEXT_HOP_INSTANCE_NOT_FOUND (464250446): + The route's nextHopInstance URL refers to an + instance that does not exist. + NEXT_HOP_INSTANCE_NOT_ON_NETWORK (243758146): + The route's nextHopInstance URL refers to an + instance that is not on the same network as the + route. + NEXT_HOP_NOT_RUNNING (417081265): + The route's next hop instance does not have a + status of RUNNING. + NOT_CRITICAL_ERROR (105763924): + Error which is not critical. We decided to + continue the process despite the mentioned + error. + NO_RESULTS_ON_PAGE (30036744): + No results are present on a particular list + page. + PARTIAL_SUCCESS (39966469): + Success is reported, but some results may be + missing due to errors + REQUIRED_TOS_AGREEMENT (3745539): + The user attempted to use a resource that + requires a TOS they have not accepted. + RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING (496728641): + Warning that a resource is in use. + RESOURCE_NOT_DELETED (168598460): + One or more of the resources set to + auto-delete could not be deleted because they + were in use. + SCHEMA_VALIDATION_IGNORED (275245642): + When a resource schema validation is ignored. 
+ SINGLE_INSTANCE_PROPERTY_TEMPLATE (268305617): + Instance template used in instance group + manager is valid as such, but its application + does not make a lot of sense, because it allows + only single instance in instance group. + UNDECLARED_PROPERTIES (390513439): + When undeclared properties in the schema are + present + UNREACHABLE (13328052): + A given scope cannot be reached. + """ + UNDEFINED_CODE = 0 + CLEANUP_FAILED = 150308440 + DEPRECATED_RESOURCE_USED = 391835586 + DEPRECATED_TYPE_USED = 346526230 + DISK_SIZE_LARGER_THAN_IMAGE_SIZE = 369442967 + EXPERIMENTAL_TYPE_USED = 451954443 + EXTERNAL_API_WARNING = 175546307 + FIELD_VALUE_OVERRIDEN = 329669423 + INJECTED_KERNELS_DEPRECATED = 417377419 + INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB = 401542606 + LARGE_DEPLOYMENT_WARNING = 481440678 + LIST_OVERHEAD_QUOTA_EXCEED = 47618117 + MISSING_TYPE_DEPENDENCY = 344505463 + NEXT_HOP_ADDRESS_NOT_ASSIGNED = 324964999 + NEXT_HOP_CANNOT_IP_FORWARD = 383382887 + NEXT_HOP_INSTANCE_HAS_NO_IPV6_INTERFACE = 146748434 + NEXT_HOP_INSTANCE_NOT_FOUND = 464250446 + NEXT_HOP_INSTANCE_NOT_ON_NETWORK = 243758146 + NEXT_HOP_NOT_RUNNING = 417081265 + NOT_CRITICAL_ERROR = 105763924 + NO_RESULTS_ON_PAGE = 30036744 + PARTIAL_SUCCESS = 39966469 + REQUIRED_TOS_AGREEMENT = 3745539 + RESOURCE_IN_USE_BY_OTHER_RESOURCE_WARNING = 496728641 + RESOURCE_NOT_DELETED = 168598460 + SCHEMA_VALIDATION_IGNORED = 275245642 + SINGLE_INSTANCE_PROPERTY_TEMPLATE = 268305617 + UNDECLARED_PROPERTIES = 390513439 + UNREACHABLE = 13328052 + + code: str = proto.Field( + proto.STRING, + number=3059181, + optional=True, + ) + data: MutableSequence["Data"] = proto.RepeatedField( + proto.MESSAGE, + number=3076010, + message="Data", + ) + message: str = proto.Field( + proto.STRING, + number=418054151, + optional=True, + ) + + class RawDisk(proto.Message): r"""The parameters of the raw disk image. @@ -78422,6 +80720,12 @@ class Region(proto.Message): [Output Only] Name of the resource. 
This field is a member of `oneof`_ ``_name``. + quota_status_warning (google.cloud.compute_v1.types.QuotaStatusWarning): + [Output Only] Warning of fetching the ``quotas`` field for + this region. This field is populated only if fetching of the + ``quotas`` field fails. + + This field is a member of `oneof`_ ``_quota_status_warning``. quotas (MutableSequence[google.cloud.compute_v1.types.Quota]): [Output Only] Quotas assigned to this region. self_link (str): @@ -78489,6 +80793,12 @@ class Status(proto.Enum): number=3373707, optional=True, ) + quota_status_warning: "QuotaStatusWarning" = proto.Field( + proto.MESSAGE, + number=302941430, + optional=True, + message="QuotaStatusWarning", + ) quotas: MutableSequence["Quota"] = proto.RepeatedField( proto.MESSAGE, number=125341947, @@ -79653,15 +81963,17 @@ class RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewal The rules that apply to the network. type_ (str): [Output Only] The type of the firewall policy. Can be one of - HIERARCHY, NETWORK, NETWORK_REGIONAL. Check the Type enum - for the list of possible values. + HIERARCHY, NETWORK, NETWORK_REGIONAL, SYSTEM_GLOBAL, + SYSTEM_REGIONAL. Check the Type enum for the list of + possible values. This field is a member of `oneof`_ ``_type``. """ class Type(proto.Enum): r"""[Output Only] The type of the firewall policy. Can be one of - HIERARCHY, NETWORK, NETWORK_REGIONAL. + HIERARCHY, NETWORK, NETWORK_REGIONAL, SYSTEM_GLOBAL, + SYSTEM_REGIONAL. Values: UNDEFINED_TYPE (0): @@ -81971,8 +84283,7 @@ class ResourcePolicyDiskConsistencyGroupPolicy(proto.Message): class ResourcePolicyGroupPlacementPolicy(proto.Message): r"""A GroupPlacementPolicy specifies resource placement - configuration. It specifies the failure bucket separation as - well as network locality + configuration. It specifies the failure bucket separation .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -83435,6 +85746,16 @@ class RouterBgp(proto.Message): router will have the same local ASN. This field is a member of `oneof`_ ``_asn``. + identifier_range (str): + Explicitly specifies a range of valid BGP + Identifiers for this Router. It is provided as a + link-local IPv4 range (from 169.254.0.0/16), of + size at least /30, even if the BGP sessions are + over IPv6. It must not overlap with any IPv4 BGP + session ranges. Other vendors commonly call this + "router ID". + + This field is a member of `oneof`_ ``_identifier_range``. keepalive_interval (int): The interval in seconds between BGP keepalive messages that are sent to the peer. Hold time is @@ -83504,6 +85825,11 @@ class AdvertisedGroups(proto.Enum): number=96892, optional=True, ) + identifier_range: str = proto.Field( + proto.STRING, + number=501573159, + optional=True, + ) keepalive_interval: int = proto.Field( proto.UINT32, number=276771516, @@ -83573,11 +85899,30 @@ class RouterBgpPeer(proto.Message): Enable enum for the list of possible values. This field is a member of `oneof`_ ``_enable``. + enable_ipv4 (bool): + Enable IPv4 traffic over BGP Peer. It is + enabled by default if the peerIpAddress is + version 4. + + This field is a member of `oneof`_ ``_enable_ipv4``. enable_ipv6 (bool): - Enable IPv6 traffic over BGP Peer. If not - specified, it is disabled by default. + Enable IPv6 traffic over BGP Peer. It is + enabled by default if the peerIpAddress is + version 6. This field is a member of `oneof`_ ``_enable_ipv6``. + export_policies (MutableSequence[str]): + List of export policies applied to this peer, in the order + they must be evaluated. The name must correspond to an + existing policy that has ROUTE_POLICY_TYPE_EXPORT type. Note + that Route Policies are currently available in preview. + Please use Beta API to use Route Policies. 
+ import_policies (MutableSequence[str]): + List of import policies applied to this peer, in the order + they must be evaluated. The name must correspond to an + existing policy that has ROUTE_POLICY_TYPE_IMPORT type. Note + that Route Policies are currently available in preview. + Please use Beta API to use Route Policies. interface_name (str): Name of the interface the BGP peer is associated with. @@ -83585,9 +85930,14 @@ class RouterBgpPeer(proto.Message): This field is a member of `oneof`_ ``_interface_name``. ip_address (str): IP address of the interface inside Google - Cloud Platform. Only IPv4 is supported. + Cloud Platform. This field is a member of `oneof`_ ``_ip_address``. + ipv4_nexthop_address (str): + IPv4 address of the interface inside Google + Cloud Platform. + + This field is a member of `oneof`_ ``_ipv4_nexthop_address``. ipv6_nexthop_address (str): IPv6 address of the interface inside Google Cloud Platform. @@ -83630,9 +85980,14 @@ class RouterBgpPeer(proto.Message): This field is a member of `oneof`_ ``_peer_asn``. peer_ip_address (str): IP address of the BGP interface outside - Google Cloud Platform. Only IPv4 is supported. + Google Cloud Platform. This field is a member of `oneof`_ ``_peer_ip_address``. + peer_ipv4_nexthop_address (str): + IPv4 address of the BGP interface outside + Google Cloud Platform. + + This field is a member of `oneof`_ ``_peer_ipv4_nexthop_address``. peer_ipv6_nexthop_address (str): IPv6 address of the BGP interface outside Google Cloud Platform. 
@@ -83777,11 +86132,24 @@ class ManagementType(proto.Enum): number=311764355, optional=True, ) + enable_ipv4: bool = proto.Field( + proto.BOOL, + number=181467937, + optional=True, + ) enable_ipv6: bool = proto.Field( proto.BOOL, number=181467939, optional=True, ) + export_policies: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=134084987, + ) + import_policies: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=451147946, + ) interface_name: str = proto.Field( proto.STRING, number=437854673, @@ -83792,6 +86160,11 @@ class ManagementType(proto.Enum): number=406272220, optional=True, ) + ipv4_nexthop_address: str = proto.Field( + proto.STRING, + number=5703377, + optional=True, + ) ipv6_nexthop_address: str = proto.Field( proto.STRING, number=27968211, @@ -83822,6 +86195,11 @@ class ManagementType(proto.Enum): number=207735769, optional=True, ) + peer_ipv4_nexthop_address: str = proto.Field( + proto.STRING, + number=469221774, + optional=True, + ) peer_ipv6_nexthop_address: str = proto.Field( proto.STRING, number=491486608, @@ -83957,15 +86335,29 @@ class RouterInterface(proto.Message): Attributes: ip_range (str): - IP address and range of the interface. The IP - range must be in the RFC3927 link-local IP - address space. The value must be a - CIDR-formatted string, for example: - 169.254.0.1/30. NOTE: Do not truncate the - address as it represents the IP address of the - interface. + IP address and range of the interface. - For + Internet Protocol version 4 (IPv4), the IP range + must be in the RFC3927 link-local IP address + space. The value must be a CIDR-formatted + string, for example, 169.254.0.1/30. Note: Do + not truncate the IP address, as it represents + the IP address of the interface. - For Internet + Protocol version 6 (IPv6), the value must be a + unique local address (ULA) range from + fdff:1::/64 with a mask length of 126 or less. + This value should be a CIDR-formatted string, + for example, fc00:0:1:1::1/112. 
Within the + router's VPC, this IPv6 prefix will be reserved + exclusively for this connection and cannot be + used for any other purpose. This field is a member of `oneof`_ ``_ip_range``. + ip_version (str): + IP version of this interface. + Check the IpVersion enum for the list of + possible values. + + This field is a member of `oneof`_ ``_ip_version``. linked_interconnect_attachment (str): URI of the linked Interconnect attachment. It must be in the same region as the router. Each @@ -84041,6 +86433,22 @@ class RouterInterface(proto.Message): This field is a member of `oneof`_ ``_subnetwork``. """ + class IpVersion(proto.Enum): + r"""IP version of this interface. + + Values: + UNDEFINED_IP_VERSION (0): + A value indicating that the enum field is not + set. + IPV4 (2254341): + No description available. + IPV6 (2254343): + No description available. + """ + UNDEFINED_IP_VERSION = 0 + IPV4 = 2254341 + IPV6 = 2254343 + class ManagementType(proto.Enum): r"""[Output Only] The resource that configures and manages this interface. - MANAGED_BY_USER is the default value and can be managed @@ -84075,6 +86483,11 @@ class ManagementType(proto.Enum): number=145092645, optional=True, ) + ip_version: str = proto.Field( + proto.STRING, + number=294959552, + optional=True, + ) linked_interconnect_attachment: str = proto.Field( proto.STRING, number=501085518, @@ -84887,15 +87300,26 @@ class RouterStatusBgpPeerStatus(proto.Message): bfd_status (google.cloud.compute_v1.types.BfdStatus): This field is a member of `oneof`_ ``_bfd_status``. + enable_ipv4 (bool): + Enable IPv4 traffic over BGP Peer. It is + enabled by default if the peerIpAddress is + version 4. + + This field is a member of `oneof`_ ``_enable_ipv4``. enable_ipv6 (bool): - Enable IPv6 traffic over BGP Peer. If not - specified, it is disabled by default. + Enable IPv6 traffic over BGP Peer. It is + enabled by default if the peerIpAddress is + version 6. This field is a member of `oneof`_ ``_enable_ipv6``. 
ip_address (str): IP address of the local BGP interface. This field is a member of `oneof`_ ``_ip_address``. + ipv4_nexthop_address (str): + IPv4 address of the local BGP interface. + + This field is a member of `oneof`_ ``_ipv4_nexthop_address``. ipv6_nexthop_address (str): IPv6 address of the local BGP interface. @@ -84924,6 +87348,10 @@ class RouterStatusBgpPeerStatus(proto.Message): IP address of the remote BGP interface. This field is a member of `oneof`_ ``_peer_ip_address``. + peer_ipv4_nexthop_address (str): + IPv4 address of the remote BGP interface. + + This field is a member of `oneof`_ ``_peer_ipv4_nexthop_address``. peer_ipv6_nexthop_address (str): IPv6 address of the remote BGP interface. @@ -84992,6 +87420,12 @@ class StatusReason(proto.Enum): UNDEFINED_STATUS_REASON (0): A value indicating that the enum field is not set. + IPV4_PEER_ON_IPV6_ONLY_CONNECTION (435936662): + BGP peer disabled because it requires IPv4 + but the underlying connection is IPv6-only. + IPV6_PEER_ON_IPV4_ONLY_CONNECTION (436304082): + BGP peer disabled because it requires IPv6 + but the underlying connection is IPv4-only. MD5_AUTH_INTERNAL_PROBLEM (140462259): Indicates internal problems with configuration of MD5 authentication. This @@ -85001,6 +87435,8 @@ class StatusReason(proto.Enum): No description available. 
""" UNDEFINED_STATUS_REASON = 0 + IPV4_PEER_ON_IPV6_ONLY_CONNECTION = 435936662 + IPV6_PEER_ON_IPV4_ONLY_CONNECTION = 436304082 MD5_AUTH_INTERNAL_PROBLEM = 140462259 STATUS_REASON_UNSPECIFIED = 394331913 @@ -85015,6 +87451,11 @@ class StatusReason(proto.Enum): optional=True, message="BfdStatus", ) + enable_ipv4: bool = proto.Field( + proto.BOOL, + number=181467937, + optional=True, + ) enable_ipv6: bool = proto.Field( proto.BOOL, number=181467939, @@ -85025,6 +87466,11 @@ class StatusReason(proto.Enum): number=406272220, optional=True, ) + ipv4_nexthop_address: str = proto.Field( + proto.STRING, + number=5703377, + optional=True, + ) ipv6_nexthop_address: str = proto.Field( proto.STRING, number=27968211, @@ -85055,6 +87501,11 @@ class StatusReason(proto.Enum): number=207735769, optional=True, ) + peer_ipv4_nexthop_address: str = proto.Field( + proto.STRING, + number=469221774, + optional=True, + ) peer_ipv6_nexthop_address: str = proto.Field( proto.STRING, number=491486608, @@ -88541,12 +90992,25 @@ class ServiceAttachment(proto.Message): This field is a member of `oneof`_ ``_connection_preference``. consumer_accept_lists (MutableSequence[google.cloud.compute_v1.types.ServiceAttachmentConsumerProjectLimit]): - Projects that are allowed to connect to this - service attachment. + Specifies which consumer projects or networks + are allowed to connect to the service + attachment. Each project or network has a + connection limit. A given service attachment can + manage connections at either the project or + network level. Therefore, both the accept and + reject lists for a given service attachment must + contain either only projects or only networks. consumer_reject_lists (MutableSequence[str]): - Projects that are not allowed to connect to - this service attachment. The project can be - specified using its id or number. + Specifies a list of projects or networks that + are not allowed to connect to this service + attachment. 
The project can be specified using + its project ID or project number and the network + can be specified using its URL. A given service + attachment can manage connections at either the + project or network level. Therefore, both the + reject and accept lists for a given service + attachment must contain either only projects or + only networks. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. @@ -90546,256 +93010,18 @@ class SetIamPolicySnapshotRequest(proto.Message): ) -class SetIamPolicySubnetworkRequest(proto.Message): - r"""A request message for Subnetworks.SetIamPolicy. See the +class SetIamPolicyStoragePoolRequest(proto.Message): + r"""A request message for StoragePools.SetIamPolicy. See the method description for details. Attributes: project (str): Project ID for this request. - region (str): - The name of the region for this request. - region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): - The body resource for this request - resource (str): - Name or id of the resource for this request. - """ - - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) - region_set_policy_request_resource: "RegionSetPolicyRequest" = proto.Field( - proto.MESSAGE, - number=276489091, - message="RegionSetPolicyRequest", - ) - resource: str = proto.Field( - proto.STRING, - number=195806222, - ) - - -class SetInstanceTemplateInstanceGroupManagerRequest(proto.Message): - r"""A request message for - InstanceGroupManagers.SetInstanceTemplate. See the method - description for details. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - instance_group_manager (str): - The name of the managed instance group. 
- instance_group_managers_set_instance_template_request_resource (google.cloud.compute_v1.types.InstanceGroupManagersSetInstanceTemplateRequest): - The body resource for this request - project (str): - Project ID for this request. - request_id (str): - An optional request ID to identify requests. - Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. For example, consider a situation - where you make an initial request and the - request times out. If you make the request again - with the same request ID, the server can check - if original operation with the same request ID - was received, and if so, will ignore the second - request. This prevents clients from accidentally - creating duplicate commitments. The request ID - must be a valid UUID with the exception that - zero UUID is not supported ( - 00000000-0000-0000-0000-000000000000). - - This field is a member of `oneof`_ ``_request_id``. - zone (str): - The name of the zone where the managed - instance group is located. - """ - - instance_group_manager: str = proto.Field( - proto.STRING, - number=249363395, - ) - instance_group_managers_set_instance_template_request_resource: "InstanceGroupManagersSetInstanceTemplateRequest" = proto.Field( - proto.MESSAGE, - number=9809093, - message="InstanceGroupManagersSetInstanceTemplateRequest", - ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - request_id: str = proto.Field( - proto.STRING, - number=37109963, - optional=True, - ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) - - -class SetInstanceTemplateRegionInstanceGroupManagerRequest(proto.Message): - r"""A request message for - RegionInstanceGroupManagers.SetInstanceTemplate. See the method - description for details. - - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - instance_group_manager (str): - The name of the managed instance group. - project (str): - Project ID for this request. - region (str): - Name of the region scoping this request. - region_instance_group_managers_set_template_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagersSetTemplateRequest): - The body resource for this request - request_id (str): - An optional request ID to identify requests. - Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. For example, consider a situation - where you make an initial request and the - request times out. If you make the request again - with the same request ID, the server can check - if original operation with the same request ID - was received, and if so, will ignore the second - request. This prevents clients from accidentally - creating duplicate commitments. The request ID - must be a valid UUID with the exception that - zero UUID is not supported ( - 00000000-0000-0000-0000-000000000000). - - This field is a member of `oneof`_ ``_request_id``. - """ - - instance_group_manager: str = proto.Field( - proto.STRING, - number=249363395, - ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) - region_instance_group_managers_set_template_request_resource: "RegionInstanceGroupManagersSetTemplateRequest" = proto.Field( - proto.MESSAGE, - number=187310412, - message="RegionInstanceGroupManagersSetTemplateRequest", - ) - request_id: str = proto.Field( - proto.STRING, - number=37109963, - optional=True, - ) - - -class SetLabelsAddressRequest(proto.Message): - r"""A request message for Addresses.SetLabels. See the method - description for details. - - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - project (str): - Project ID for this request. - region (str): - The region for this request. - region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): - The body resource for this request - request_id (str): - An optional request ID to identify requests. - Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. For example, consider a situation - where you make an initial request and the - request times out. If you make the request again - with the same request ID, the server can check - if original operation with the same request ID - was received, and if so, will ignore the second - request. This prevents clients from accidentally - creating duplicate commitments. The request ID - must be a valid UUID with the exception that - zero UUID is not supported ( - 00000000-0000-0000-0000-000000000000). - - This field is a member of `oneof`_ ``_request_id``. - resource (str): - Name or id of the resource for this request. - """ - - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) - region_set_labels_request_resource: "RegionSetLabelsRequest" = proto.Field( - proto.MESSAGE, - number=259357782, - message="RegionSetLabelsRequest", - ) - request_id: str = proto.Field( - proto.STRING, - number=37109963, - optional=True, - ) - resource: str = proto.Field( - proto.STRING, - number=195806222, - ) - - -class SetLabelsDiskRequest(proto.Message): - r"""A request message for Disks.SetLabels. See the method - description for details. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - project (str): - Project ID for this request. 
- request_id (str): - An optional request ID to identify requests. - Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. For example, consider a situation - where you make an initial request and the - request times out. If you make the request again - with the same request ID, the server can check - if original operation with the same request ID - was received, and if so, will ignore the second - request. This prevents clients from accidentally - creating duplicate commitments. The request ID - must be a valid UUID with the exception that - zero UUID is not supported ( - 00000000-0000-0000-0000-000000000000). - - This field is a member of `oneof`_ ``_request_id``. resource (str): Name or id of the resource for this request. zone (str): The name of the zone for this request. - zone_set_labels_request_resource (google.cloud.compute_v1.types.ZoneSetLabelsRequest): + zone_set_policy_request_resource (google.cloud.compute_v1.types.ZoneSetPolicyRequest): The body resource for this request """ @@ -90803,11 +93029,6 @@ class SetLabelsDiskRequest(proto.Message): proto.STRING, number=227560217, ) - request_id: str = proto.Field( - proto.STRING, - number=37109963, - optional=True, - ) resource: str = proto.Field( proto.STRING, number=195806222, @@ -90816,73 +93037,24 @@ class SetLabelsDiskRequest(proto.Message): proto.STRING, number=3744684, ) - zone_set_labels_request_resource: "ZoneSetLabelsRequest" = proto.Field( - proto.MESSAGE, - number=364950798, - message="ZoneSetLabelsRequest", - ) - - -class SetLabelsExternalVpnGatewayRequest(proto.Message): - r"""A request message for ExternalVpnGateways.SetLabels. See the - method description for details. - - Attributes: - global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): - The body resource for this request - project (str): - Project ID for this request. 
- resource (str): - Name or id of the resource for this request. - """ - - global_set_labels_request_resource: "GlobalSetLabelsRequest" = proto.Field( + zone_set_policy_request_resource: "ZoneSetPolicyRequest" = proto.Field( proto.MESSAGE, - number=319917189, - message="GlobalSetLabelsRequest", - ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - resource: str = proto.Field( - proto.STRING, - number=195806222, + number=382082107, + message="ZoneSetPolicyRequest", ) -class SetLabelsForwardingRuleRequest(proto.Message): - r"""A request message for ForwardingRules.SetLabels. See the +class SetIamPolicySubnetworkRequest(proto.Message): + r"""A request message for Subnetworks.SetIamPolicy. See the method description for details. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - Attributes: project (str): Project ID for this request. region (str): - The region for this request. - region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The name of the region for this request. + region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): The body resource for this request - request_id (str): - An optional request ID to identify requests. - Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. For example, consider a situation - where you make an initial request and the - request times out. If you make the request again - with the same request ID, the server can check - if original operation with the same request ID - was received, and if so, will ignore the second - request. This prevents clients from accidentally - creating duplicate commitments. The request ID - must be a valid UUID with the exception that - zero UUID is not supported ( - 00000000-0000-0000-0000-000000000000). 
- - This field is a member of `oneof`_ ``_request_id``. resource (str): Name or id of the resource for this request. """ @@ -90895,99 +93067,10 @@ class SetLabelsForwardingRuleRequest(proto.Message): proto.STRING, number=138946292, ) - region_set_labels_request_resource: "RegionSetLabelsRequest" = proto.Field( - proto.MESSAGE, - number=259357782, - message="RegionSetLabelsRequest", - ) - request_id: str = proto.Field( - proto.STRING, - number=37109963, - optional=True, - ) - resource: str = proto.Field( - proto.STRING, - number=195806222, - ) - - -class SetLabelsGlobalAddressRequest(proto.Message): - r"""A request message for GlobalAddresses.SetLabels. See the - method description for details. - - Attributes: - global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): - The body resource for this request - project (str): - Project ID for this request. - resource (str): - Name or id of the resource for this request. - """ - - global_set_labels_request_resource: "GlobalSetLabelsRequest" = proto.Field( - proto.MESSAGE, - number=319917189, - message="GlobalSetLabelsRequest", - ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - resource: str = proto.Field( - proto.STRING, - number=195806222, - ) - - -class SetLabelsGlobalForwardingRuleRequest(proto.Message): - r"""A request message for GlobalForwardingRules.SetLabels. See - the method description for details. - - Attributes: - global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): - The body resource for this request - project (str): - Project ID for this request. - resource (str): - Name or id of the resource for this request. 
- """ - - global_set_labels_request_resource: "GlobalSetLabelsRequest" = proto.Field( - proto.MESSAGE, - number=319917189, - message="GlobalSetLabelsRequest", - ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - resource: str = proto.Field( - proto.STRING, - number=195806222, - ) - - -class SetLabelsImageRequest(proto.Message): - r"""A request message for Images.SetLabels. See the method - description for details. - - Attributes: - global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): - The body resource for this request - project (str): - Project ID for this request. - resource (str): - Name or id of the resource for this request. - """ - - global_set_labels_request_resource: "GlobalSetLabelsRequest" = proto.Field( + region_set_policy_request_resource: "RegionSetPolicyRequest" = proto.Field( proto.MESSAGE, - number=319917189, - message="GlobalSetLabelsRequest", - ) - project: str = proto.Field( - proto.STRING, - number=227560217, + number=276489091, + message="RegionSetPolicyRequest", ) resource: str = proto.Field( proto.STRING, @@ -90995,17 +93078,432 @@ class SetLabelsImageRequest(proto.Message): ) -class SetLabelsInstanceRequest(proto.Message): - r"""A request message for Instances.SetLabels. See the method +class SetInstanceTemplateInstanceGroupManagerRequest(proto.Message): + r"""A request message for + InstanceGroupManagers.SetInstanceTemplate. See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - instance (str): - Name of the instance scoping this request. - instances_set_labels_request_resource (google.cloud.compute_v1.types.InstancesSetLabelsRequest): + instance_group_manager (str): + The name of the managed instance group. 
+ instance_group_managers_set_instance_template_request_resource (google.cloud.compute_v1.types.InstanceGroupManagersSetInstanceTemplateRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone where the managed + instance group is located. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + instance_group_managers_set_instance_template_request_resource: "InstanceGroupManagersSetInstanceTemplateRequest" = proto.Field( + proto.MESSAGE, + number=9809093, + message="InstanceGroupManagersSetInstanceTemplateRequest", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class SetInstanceTemplateRegionInstanceGroupManagerRequest(proto.Message): + r"""A request message for + RegionInstanceGroupManagers.SetInstanceTemplate. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + The name of the managed instance group. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + region_instance_group_managers_set_template_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupManagersSetTemplateRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_instance_group_managers_set_template_request_resource: "RegionInstanceGroupManagersSetTemplateRequest" = proto.Field( + proto.MESSAGE, + number=187310412, + message="RegionInstanceGroupManagersSetTemplateRequest", + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class SetLabelsAddressRequest(proto.Message): + r"""A request message for Addresses.SetLabels. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + The region for this request. + region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + resource (str): + Name or id of the resource for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_set_labels_request_resource: "RegionSetLabelsRequest" = proto.Field( + proto.MESSAGE, + number=259357782, + message="RegionSetLabelsRequest", + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetLabelsDiskRequest(proto.Message): + r"""A request message for Disks.SetLabels. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. 
+ request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + resource (str): + Name or id of the resource for this request. + zone (str): + The name of the zone for this request. + zone_set_labels_request_resource (google.cloud.compute_v1.types.ZoneSetLabelsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + zone_set_labels_request_resource: "ZoneSetLabelsRequest" = proto.Field( + proto.MESSAGE, + number=364950798, + message="ZoneSetLabelsRequest", + ) + + +class SetLabelsExternalVpnGatewayRequest(proto.Message): + r"""A request message for ExternalVpnGateways.SetLabels. See the + method description for details. + + Attributes: + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. 
+ """ + + global_set_labels_request_resource: "GlobalSetLabelsRequest" = proto.Field( + proto.MESSAGE, + number=319917189, + message="GlobalSetLabelsRequest", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetLabelsForwardingRuleRequest(proto.Message): + r"""A request message for ForwardingRules.SetLabels. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + The region for this request. + region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + resource (str): + Name or id of the resource for this request. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_set_labels_request_resource: "RegionSetLabelsRequest" = proto.Field( + proto.MESSAGE, + number=259357782, + message="RegionSetLabelsRequest", + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetLabelsGlobalAddressRequest(proto.Message): + r"""A request message for GlobalAddresses.SetLabels. See the + method description for details. + + Attributes: + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + """ + + global_set_labels_request_resource: "GlobalSetLabelsRequest" = proto.Field( + proto.MESSAGE, + number=319917189, + message="GlobalSetLabelsRequest", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetLabelsGlobalForwardingRuleRequest(proto.Message): + r"""A request message for GlobalForwardingRules.SetLabels. See + the method description for details. + + Attributes: + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. 
+ """ + + global_set_labels_request_resource: "GlobalSetLabelsRequest" = proto.Field( + proto.MESSAGE, + number=319917189, + message="GlobalSetLabelsRequest", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetLabelsImageRequest(proto.Message): + r"""A request message for Images.SetLabels. See the method + description for details. + + Attributes: + global_set_labels_request_resource (google.cloud.compute_v1.types.GlobalSetLabelsRequest): + The body resource for this request + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + """ + + global_set_labels_request_resource: "GlobalSetLabelsRequest" = proto.Field( + proto.MESSAGE, + number=319917189, + message="GlobalSetLabelsRequest", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class SetLabelsInstanceRequest(proto.Message): + r"""A request message for Instances.SetLabels. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance (str): + Name of the instance scoping this request. + instances_set_labels_request_resource (google.cloud.compute_v1.types.InstancesSetLabelsRequest): The body resource for this request project (str): Project ID for this request. @@ -94606,8 +97104,9 @@ class SnapshotSettingsStorageLocationSettings(proto.Message): Attributes: locations (MutableMapping[str, google.cloud.compute_v1.types.SnapshotSettingsStorageLocationSettingsStorageLocationPreference]): When the policy is SPECIFIC_LOCATIONS, snapshots will be - stored in the locations listed in this field. Keys are GCS - bucket locations. + stored in the locations listed in this field. Keys are Cloud + Storage bucket locations. 
Only one location can be + specified. policy (str): The chosen location policy. Check the Policy enum for the list of possible @@ -94628,9 +97127,10 @@ class Policy(proto.Enum): originating disk. No additional parameters are needed. NEAREST_MULTI_REGION (212467515): - Store snapshot to the nearest multi region - GCS bucket, relative to the originating disk. No - additional parameters are needed. + Store snapshot in the nearest multi region + Cloud Storage bucket, relative to the + originating disk. No additional parameters are + needed. SPECIFIC_LOCATIONS (280093809): Store snapshot in the specific locations, as specified by the user. The list of regions to store must be defined under @@ -94667,7 +97167,8 @@ class SnapshotSettingsStorageLocationSettingsStorageLocationPreference(proto.Mes Attributes: name (str): Name of the location. It should be one of the - GCS buckets. + Cloud Storage buckets. Only one location can be + specified. This field is a member of `oneof`_ ``_name``. """ @@ -96596,17 +99097,18 @@ class StopInstanceRequest(proto.Message): ) -class Subnetwork(proto.Message): - r"""Represents a Subnetwork resource. A subnetwork (also known as - a subnet) is a logical partition of a Virtual Private Cloud - network with one primary IP range and zero or more secondary IP - ranges. For more information, read Virtual Private Cloud (VPC) - Network. - +class StoragePool(proto.Message): + r"""Represents a zonal storage pool resource. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + capacity_provisioning_type (str): + Provisioning type of the byte capacity of the + pool. Check the CapacityProvisioningType enum + for the list of possible values. + + This field is a member of `oneof`_ ``_capacity_provisioning_type``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. 
@@ -96614,361 +99116,174 @@ class Subnetwork(proto.Message): description (str): An optional description of this resource. Provide this property when you create the - resource. This field can be set only at resource - creation time. + resource. This field is a member of `oneof`_ ``_description``. - enable_flow_logs (bool): - Whether to enable flow logging for this subnetwork. If this - field is not explicitly set, it will not appear in get - listings. If not set the default behavior is determined by - the org policy, if there is no org policy specified, then it - will default to disabled. This field isn't supported if the - subnet purpose field is set to REGIONAL_MANAGED_PROXY. - - This field is a member of `oneof`_ ``_enable_flow_logs``. - external_ipv6_prefix (str): - The external IPv6 address range that is owned - by this subnetwork. - - This field is a member of `oneof`_ ``_external_ipv6_prefix``. - fingerprint (str): - Fingerprint of this resource. A hash of the - contents stored in this object. This field is - used in optimistic locking. This field will be - ignored when inserting a Subnetwork. An - up-to-date fingerprint must be provided in order - to update the Subnetwork, otherwise the request - will fail with error 412 conditionNotMet. To see - the latest fingerprint, make a get() request to - retrieve a Subnetwork. - - This field is a member of `oneof`_ ``_fingerprint``. - gateway_address (str): - [Output Only] The gateway address for default routes to - reach destination addresses outside this subnetwork. - - This field is a member of `oneof`_ ``_gateway_address``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. This field is a member of `oneof`_ ``_id``. - internal_ipv6_prefix (str): - [Output Only] The internal IPv6 address range that is - assigned to this subnetwork. - - This field is a member of `oneof`_ ``_internal_ipv6_prefix``. 
- ip_cidr_range (str): - The range of internal addresses that are - owned by this subnetwork. Provide this property - when you create the subnetwork. For example, - 10.0.0.0/8 or 100.64.0.0/10. Ranges must be - unique and non-overlapping within a network. - Only IPv4 is supported. This field is set at - resource creation time. The range can be any - range listed in the Valid ranges list. The range - can be expanded after creation using - expandIpCidrRange. - - This field is a member of `oneof`_ ``_ip_cidr_range``. - ipv6_access_type (str): - The access type of IPv6 address this subnet holds. It's - immutable and can only be specified during creation or the - first time the subnet is updated into IPV4_IPV6 dual stack. - Check the Ipv6AccessType enum for the list of possible - values. - - This field is a member of `oneof`_ ``_ipv6_access_type``. - ipv6_cidr_range (str): - [Output Only] This field is for internal use. - - This field is a member of `oneof`_ ``_ipv6_cidr_range``. kind (str): [Output Only] Type of the resource. Always - compute#subnetwork for Subnetwork resources. + compute#storagePool for storage pools. This field is a member of `oneof`_ ``_kind``. - log_config (google.cloud.compute_v1.types.SubnetworkLogConfig): - This field denotes the VPC flow logging - options for this subnetwork. If logging is - enabled, logs are exported to Cloud Logging. + label_fingerprint (str): + A fingerprint for the labels being applied to + this storage pool, which is essentially a hash + of the labels set used for optimistic locking. + The fingerprint is initially generated by + Compute Engine and changes after every request + to modify or update labels. You must always + provide an up-to-date fingerprint hash in order + to update or change labels, otherwise the + request will fail with error 412 + conditionNotMet. To see the latest fingerprint, + make a get() request to retrieve a storage pool. - This field is a member of `oneof`_ ``_log_config``. 
+ This field is a member of `oneof`_ ``_label_fingerprint``. + labels (MutableMapping[str, str]): + Labels to apply to this storage pool. These + can be later modified by the setLabels method. name (str): - The name of the resource, provided by the client when - initially creating the resource. The name must be 1-63 - characters long, and comply with RFC1035. Specifically, the - name must be 1-63 characters long and match the regular - expression ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the - first character must be a lowercase letter, and all - following characters must be a dash, lowercase letter, or - digit, except the last character, which cannot be a dash. + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply with RFC1035. Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. This field is a member of `oneof`_ ``_name``. - network (str): - The URL of the network to which this - subnetwork belongs, provided by the client when - initially creating the subnetwork. This field - can be set only at resource creation time. - - This field is a member of `oneof`_ ``_network``. - private_ip_google_access (bool): - Whether the VMs in this subnet can access - Google services without assigned external IP - addresses. This field can be both set at - resource creation time and updated using - setPrivateIpGoogleAccess. - - This field is a member of `oneof`_ ``_private_ip_google_access``. - private_ipv6_google_access (str): - This field is for internal use. This field - can be both set at resource creation time and - updated using patch. Check the - PrivateIpv6GoogleAccess enum for the list of - possible values. 
- - This field is a member of `oneof`_ ``_private_ipv6_google_access``. - purpose (str): - The purpose of the resource. This field can be either - PRIVATE, GLOBAL_MANAGED_PROXY, REGIONAL_MANAGED_PROXY, - PRIVATE_SERVICE_CONNECT, or PRIVATE is the default purpose - for user-created subnets or subnets that are automatically - created in auto mode networks. Subnets with purpose set to - GLOBAL_MANAGED_PROXY or REGIONAL_MANAGED_PROXY are - user-created subnetworks that are reserved for Envoy-based - load balancers. A subnet with purpose set to - PRIVATE_SERVICE_CONNECT is used to publish services using - Private Service Connect. If unspecified, the subnet purpose - defaults to PRIVATE. The enableFlowLogs field isn't - supported if the subnet purpose field is set to - GLOBAL_MANAGED_PROXY or REGIONAL_MANAGED_PROXY. Check the - Purpose enum for the list of possible values. - - This field is a member of `oneof`_ ``_purpose``. - region (str): - URL of the region where the Subnetwork - resides. This field can be set only at resource - creation time. - - This field is a member of `oneof`_ ``_region``. - reserved_internal_range (str): - The URL of the reserved internal range. + performance_provisioning_type (str): + Provisioning type of the performance-related + parameters of the pool, such as throughput and + IOPS. Check the PerformanceProvisioningType enum + for the list of possible values. - This field is a member of `oneof`_ ``_reserved_internal_range``. - role (str): - The role of subnetwork. Currently, this field is only used - when purpose is set to GLOBAL_MANAGED_PROXY or - REGIONAL_MANAGED_PROXY. The value can be set to ACTIVE or - BACKUP. An ACTIVE subnetwork is one that is currently being - used for Envoy-based load balancers in a region. A BACKUP - subnetwork is one that is ready to be promoted to ACTIVE or - is currently draining. This field can be updated with a - patch request. Check the Role enum for the list of possible - values. 
+ This field is a member of `oneof`_ ``_performance_provisioning_type``. + pool_provisioned_capacity_gb (int): + Size, in GiB, of the storage pool. + + This field is a member of `oneof`_ ``_pool_provisioned_capacity_gb``. + pool_provisioned_iops (int): + Provisioned IOPS of the storage pool. Only + relevant if the storage pool type is + hyperdisk-balanced. + + This field is a member of `oneof`_ ``_pool_provisioned_iops``. + pool_provisioned_throughput (int): + Provisioned throughput of the storage pool. + Only relevant if the storage pool type is + hyperdisk-balanced or hyperdisk-throughput. + + This field is a member of `oneof`_ ``_pool_provisioned_throughput``. + resource_status (google.cloud.compute_v1.types.StoragePoolResourceStatus): + [Output Only] Status information for the storage pool + resource. - This field is a member of `oneof`_ ``_role``. - secondary_ip_ranges (MutableSequence[google.cloud.compute_v1.types.SubnetworkSecondaryRange]): - An array of configurations for secondary IP - ranges for VM instances contained in this - subnetwork. The primary IP of such VM must - belong to the primary ipCidrRange of the - subnetwork. The alias IPs may belong to either - primary or secondary ranges. This field can be - updated with a patch request. + This field is a member of `oneof`_ ``_resource_status``. self_link (str): - [Output Only] Server-defined URL for the resource. + [Output Only] Server-defined fully-qualified URL for this + resource. This field is a member of `oneof`_ ``_self_link``. - stack_type (str): - The stack type for the subnet. If set to IPV4_ONLY, new VMs - in the subnet are assigned IPv4 addresses only. If set to - IPV4_IPV6, new VMs in the subnet can be assigned both IPv4 - and IPv6 addresses. If not specified, IPV4_ONLY is used. - This field can be both set at resource creation time and - updated using patch. Check the StackType enum for the list - of possible values. 
+ self_link_with_id (str): + [Output Only] Server-defined URL for this resource's + resource id. - This field is a member of `oneof`_ ``_stack_type``. + This field is a member of `oneof`_ ``_self_link_with_id``. state (str): - [Output Only] The state of the subnetwork, which can be one - of the following values: READY: Subnetwork is created and - ready to use DRAINING: only applicable to subnetworks that - have the purpose set to INTERNAL_HTTPS_LOAD_BALANCER and - indicates that connections to the load balancer are being - drained. A subnetwork that is draining cannot be used or - modified until it reaches a status of READY Check the State - enum for the list of possible values. + [Output Only] The status of storage pool creation. - + CREATING: Storage pool is provisioning. storagePool. - + FAILED: Storage pool creation failed. - READY: Storage pool + is ready for use. - DELETING: Storage pool is deleting. + Check the State enum for the list of possible values. This field is a member of `oneof`_ ``_state``. - """ + status (google.cloud.compute_v1.types.StoragePoolResourceStatus): + [Output Only] Status information for the storage pool + resource. - class Ipv6AccessType(proto.Enum): - r"""The access type of IPv6 address this subnet holds. It's immutable - and can only be specified during creation or the first time the - subnet is updated into IPV4_IPV6 dual stack. + This field is a member of `oneof`_ ``_status``. + storage_pool_type (str): + Type of the storage pool. - Values: - UNDEFINED_IPV6_ACCESS_TYPE (0): - A value indicating that the enum field is not - set. - EXTERNAL (35607499): - VMs on this subnet will be assigned IPv6 - addresses that are accessible via the Internet, - as well as the VPC network. - INTERNAL (279295677): - VMs on this subnet will be assigned IPv6 - addresses that are only accessible over the VPC - network. - UNSPECIFIED_IPV6_ACCESS_TYPE (313080613): - No description available. 
- """ - UNDEFINED_IPV6_ACCESS_TYPE = 0 - EXTERNAL = 35607499 - INTERNAL = 279295677 - UNSPECIFIED_IPV6_ACCESS_TYPE = 313080613 + This field is a member of `oneof`_ ``_storage_pool_type``. + zone (str): + [Output Only] URL of the zone where the storage pool + resides. You must specify this field as part of the HTTP + request URL. It is not settable as a field in the request + body. - class PrivateIpv6GoogleAccess(proto.Enum): - r"""This field is for internal use. This field can be both set at - resource creation time and updated using patch. + This field is a member of `oneof`_ ``_zone``. + """ - Values: - UNDEFINED_PRIVATE_IPV6_GOOGLE_ACCESS (0): - A value indicating that the enum field is not - set. - DISABLE_GOOGLE_ACCESS (450958579): - Disable private IPv6 access to/from Google - services. - ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE (427975994): - Bidirectional private IPv6 access to/from - Google services. - ENABLE_OUTBOUND_VM_ACCESS_TO_GOOGLE (288210263): - Outbound private IPv6 access from VMs in this - subnet to Google services. - """ - UNDEFINED_PRIVATE_IPV6_GOOGLE_ACCESS = 0 - DISABLE_GOOGLE_ACCESS = 450958579 - ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE = 427975994 - ENABLE_OUTBOUND_VM_ACCESS_TO_GOOGLE = 288210263 + class CapacityProvisioningType(proto.Enum): + r"""Provisioning type of the byte capacity of the pool. + Additional supported values which may be not listed in the enum + directly due to technical reasons: - class Purpose(proto.Enum): - r"""The purpose of the resource. This field can be either PRIVATE, - GLOBAL_MANAGED_PROXY, REGIONAL_MANAGED_PROXY, - PRIVATE_SERVICE_CONNECT, or PRIVATE is the default purpose for - user-created subnets or subnets that are automatically created in - auto mode networks. Subnets with purpose set to GLOBAL_MANAGED_PROXY - or REGIONAL_MANAGED_PROXY are user-created subnetworks that are - reserved for Envoy-based load balancers. 
A subnet with purpose set - to PRIVATE_SERVICE_CONNECT is used to publish services using Private - Service Connect. If unspecified, the subnet purpose defaults to - PRIVATE. The enableFlowLogs field isn't supported if the subnet - purpose field is set to GLOBAL_MANAGED_PROXY or - REGIONAL_MANAGED_PROXY. + ADVANCED + STANDARD + UNSPECIFIED Values: - UNDEFINED_PURPOSE (0): + UNDEFINED_CAPACITY_PROVISIONING_TYPE (0): A value indicating that the enum field is not set. - GLOBAL_MANAGED_PROXY (236463602): - Subnet reserved for Global Envoy-based Load - Balancing. - INTERNAL_HTTPS_LOAD_BALANCER (248748889): - Subnet reserved for Internal HTTP(S) Load Balancing. This is - a legacy purpose, please use REGIONAL_MANAGED_PROXY instead. - PRIVATE (403485027): - Regular user created or automatically created - subnet. - PRIVATE_NAT (367764517): - Subnetwork used as source range for Private - NAT Gateways. - PRIVATE_RFC_1918 (254902107): - Regular user created or automatically created - subnet. - PRIVATE_SERVICE_CONNECT (48134724): - Subnetworks created for Private Service - Connect in the producer network. - REGIONAL_MANAGED_PROXY (153049966): - Subnetwork used for Regional Envoy-based Load - Balancing. """ - UNDEFINED_PURPOSE = 0 - GLOBAL_MANAGED_PROXY = 236463602 - INTERNAL_HTTPS_LOAD_BALANCER = 248748889 - PRIVATE = 403485027 - PRIVATE_NAT = 367764517 - PRIVATE_RFC_1918 = 254902107 - PRIVATE_SERVICE_CONNECT = 48134724 - REGIONAL_MANAGED_PROXY = 153049966 + UNDEFINED_CAPACITY_PROVISIONING_TYPE = 0 - class Role(proto.Enum): - r"""The role of subnetwork. Currently, this field is only used when - purpose is set to GLOBAL_MANAGED_PROXY or REGIONAL_MANAGED_PROXY. - The value can be set to ACTIVE or BACKUP. An ACTIVE subnetwork is - one that is currently being used for Envoy-based load balancers in a - region. A BACKUP subnetwork is one that is ready to be promoted to - ACTIVE or is currently draining. This field can be updated with a - patch request. 
- - Values: - UNDEFINED_ROLE (0): - A value indicating that the enum field is not - set. - ACTIVE (314733318): - The ACTIVE subnet that is currently used. - BACKUP (341010882): - The BACKUP subnet that could be promoted to - ACTIVE. - """ - UNDEFINED_ROLE = 0 - ACTIVE = 314733318 - BACKUP = 341010882 + class PerformanceProvisioningType(proto.Enum): + r"""Provisioning type of the performance-related parameters of + the pool, such as throughput and IOPS. Additional supported + values which may be not listed in the enum directly due to + technical reasons: - class StackType(proto.Enum): - r"""The stack type for the subnet. If set to IPV4_ONLY, new VMs in the - subnet are assigned IPv4 addresses only. If set to IPV4_IPV6, new - VMs in the subnet can be assigned both IPv4 and IPv6 addresses. If - not specified, IPV4_ONLY is used. This field can be both set at - resource creation time and updated using patch. + ADVANCED + STANDARD + UNSPECIFIED Values: - UNDEFINED_STACK_TYPE (0): + UNDEFINED_PERFORMANCE_PROVISIONING_TYPE (0): A value indicating that the enum field is not set. - IPV4_IPV6 (22197249): - New VMs in this subnet can have both IPv4 and - IPv6 addresses. - IPV4_ONLY (22373798): - New VMs in this subnet will only be assigned - IPv4 addresses. - UNSPECIFIED_STACK_TYPE (298084569): - No description available. """ - UNDEFINED_STACK_TYPE = 0 - IPV4_IPV6 = 22197249 - IPV4_ONLY = 22373798 - UNSPECIFIED_STACK_TYPE = 298084569 + UNDEFINED_PERFORMANCE_PROVISIONING_TYPE = 0 class State(proto.Enum): - r"""[Output Only] The state of the subnetwork, which can be one of the - following values: READY: Subnetwork is created and ready to use - DRAINING: only applicable to subnetworks that have the purpose set - to INTERNAL_HTTPS_LOAD_BALANCER and indicates that connections to - the load balancer are being drained. A subnetwork that is draining - cannot be used or modified until it reaches a status of READY + r"""[Output Only] The status of storage pool creation. 
- CREATING: + Storage pool is provisioning. storagePool. - FAILED: Storage pool + creation failed. - READY: Storage pool is ready for use. - DELETING: + Storage pool is deleting. Values: UNDEFINED_STATE (0): A value indicating that the enum field is not set. - DRAINING (480455402): - Subnetwork is being drained. + CREATING (455564985): + StoragePool is provisioning + DELETING (528602024): + StoragePool is deleting. + FAILED (455706685): + StoragePool creation failed. READY (77848963): - Subnetwork is ready for use. + StoragePool is ready for use. """ UNDEFINED_STATE = 0 - DRAINING = 480455402 + CREATING = 455564985 + DELETING = 528602024 + FAILED = 455706685 READY = 77848963 + capacity_provisioning_type: str = proto.Field( + proto.STRING, + number=251610375, + optional=True, + ) creation_timestamp: str = proto.Field( proto.STRING, number=30525366, @@ -96979,143 +99294,1504 @@ class State(proto.Enum): number=422937596, optional=True, ) - enable_flow_logs: bool = proto.Field( - proto.BOOL, - number=151544420, - optional=True, - ) - external_ipv6_prefix: str = proto.Field( - proto.STRING, - number=139299190, - optional=True, - ) - fingerprint: str = proto.Field( - proto.STRING, - number=234678500, - optional=True, - ) - gateway_address: str = proto.Field( - proto.STRING, - number=459867385, - optional=True, - ) id: int = proto.Field( proto.UINT64, number=3355, optional=True, ) - internal_ipv6_prefix: str = proto.Field( - proto.STRING, - number=506270056, - optional=True, - ) - ip_cidr_range: str = proto.Field( + kind: str = proto.Field( proto.STRING, - number=98117322, + number=3292052, optional=True, ) - ipv6_access_type: str = proto.Field( + label_fingerprint: str = proto.Field( proto.STRING, - number=504658653, + number=178124825, optional=True, ) - ipv6_cidr_range: str = proto.Field( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, - number=273141258, - optional=True, - ) - kind: str = proto.Field( proto.STRING, - number=3292052, - optional=True, 
- ) - log_config: "SubnetworkLogConfig" = proto.Field( - proto.MESSAGE, - number=351299741, - optional=True, - message="SubnetworkLogConfig", + number=500195327, ) name: str = proto.Field( proto.STRING, number=3373707, optional=True, ) - network: str = proto.Field( + performance_provisioning_type: str = proto.Field( proto.STRING, - number=232872494, + number=468553469, optional=True, ) - private_ip_google_access: bool = proto.Field( - proto.BOOL, - number=421491790, + pool_provisioned_capacity_gb: int = proto.Field( + proto.INT64, + number=478537682, optional=True, ) - private_ipv6_google_access: str = proto.Field( - proto.STRING, - number=48277006, + pool_provisioned_iops: int = proto.Field( + proto.INT64, + number=112092311, optional=True, ) - purpose: str = proto.Field( - proto.STRING, - number=316407070, + pool_provisioned_throughput: int = proto.Field( + proto.INT64, + number=169215640, optional=True, ) - region: str = proto.Field( + resource_status: "StoragePoolResourceStatus" = proto.Field( + proto.MESSAGE, + number=249429315, + optional=True, + message="StoragePoolResourceStatus", + ) + self_link: str = proto.Field( proto.STRING, - number=138946292, + number=456214797, optional=True, ) - reserved_internal_range: str = proto.Field( + self_link_with_id: str = proto.Field( proto.STRING, - number=286248754, + number=44520962, optional=True, ) - role: str = proto.Field( + state: str = proto.Field( proto.STRING, - number=3506294, + number=109757585, optional=True, ) - secondary_ip_ranges: MutableSequence[ - "SubnetworkSecondaryRange" - ] = proto.RepeatedField( + status: "StoragePoolResourceStatus" = proto.Field( proto.MESSAGE, - number=136658915, - message="SubnetworkSecondaryRange", - ) - self_link: str = proto.Field( - proto.STRING, - number=456214797, + number=181260274, optional=True, + message="StoragePoolResourceStatus", ) - stack_type: str = proto.Field( + storage_pool_type: str = proto.Field( proto.STRING, - number=425908881, + number=285999289, 
optional=True, ) - state: str = proto.Field( + zone: str = proto.Field( proto.STRING, - number=109757585, + number=3744684, optional=True, ) -class SubnetworkAggregatedList(proto.Message): +class StoragePoolAggregatedList(proto.Message): r""" .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + etag (str): + + This field is a member of `oneof`_ ``_etag``. id (str): [Output Only] Unique identifier for the resource; defined by the server. This field is a member of `oneof`_ ``_id``. - items (MutableMapping[str, google.cloud.compute_v1.types.SubnetworksScopedList]): - A list of SubnetworksScopedList resources. + items (MutableMapping[str, google.cloud.compute_v1.types.StoragePoolsScopedList]): + A list of StoragePoolsScopedList resources. kind (str): [Output Only] Type of resource. Always - compute#subnetworkAggregatedList for aggregated lists of - subnetworks. + compute#storagePoolAggregatedList for aggregated lists of + storage pools. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + etag: str = proto.Field( + proto.STRING, + number=3123477, + optional=True, + ) + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, "StoragePoolsScopedList"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message="StoragePoolsScopedList", + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: "Warning" = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message="Warning", + ) + + +class StoragePoolDisk(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + attached_instances (MutableSequence[str]): + [Output Only] Instances this disk is attached to. + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + disk (str): + [Output Only] The URL of the disk. + + This field is a member of `oneof`_ ``_disk``. + name (str): + [Output Only] The name of the disk. + + This field is a member of `oneof`_ ``_name``. + provisioned_iops (int): + [Output Only] The number of IOPS provisioned for the disk. + + This field is a member of `oneof`_ ``_provisioned_iops``. + provisioned_throughput (int): + [Output Only] The throughput provisioned for the disk. + + This field is a member of `oneof`_ ``_provisioned_throughput``. + resource_policies (MutableSequence[str]): + [Output Only] Resource policies applied to disk for + automatic snapshot creations. + size_gb (int): + [Output Only] The disk size, in GB. 
+ + This field is a member of `oneof`_ ``_size_gb``. + status (str): + [Output Only] The disk status. Check the Status enum for the + list of possible values. + + This field is a member of `oneof`_ ``_status``. + type_ (str): + [Output Only] The disk type. + + This field is a member of `oneof`_ ``_type``. + used_bytes (int): + [Output Only] Amount of disk space used. + + This field is a member of `oneof`_ ``_used_bytes``. + """ + + class Status(proto.Enum): + r"""[Output Only] The disk status. + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + CREATING (455564985): + Disk is provisioning + DELETING (528602024): + Disk is deleting. + FAILED (455706685): + Disk creation failed. + READY (77848963): + Disk is ready for use. + RESTORING (404263851): + Source data is being copied into the disk. + UNAVAILABLE (413756464): + Disk is currently unavailable and cannot be + accessed, attached or detached. + """ + UNDEFINED_STATUS = 0 + CREATING = 455564985 + DELETING = 528602024 + FAILED = 455706685 + READY = 77848963 + RESTORING = 404263851 + UNAVAILABLE = 413756464 + + attached_instances: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=65255843, + ) + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + disk: str = proto.Field( + proto.STRING, + number=3083677, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + provisioned_iops: int = proto.Field( + proto.INT64, + number=186769108, + optional=True, + ) + provisioned_throughput: int = proto.Field( + proto.INT64, + number=526524181, + optional=True, + ) + resource_policies: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=22220385, + ) + size_gb: int = proto.Field( + proto.INT64, + number=494929369, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + type_: str = proto.Field( + proto.STRING, + 
number=3575610, + optional=True, + ) + used_bytes: int = proto.Field( + proto.INT64, + number=231640425, + optional=True, + ) + + +class StoragePoolList(proto.Message): + r"""A list of StoragePool resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + etag (str): + + This field is a member of `oneof`_ ``_etag``. + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.StoragePool]): + A list of StoragePool resources. + kind (str): + [Output Only] Type of resource. Always + compute#storagePoolList for lists of storagePools. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. end_interface: + MixerListResponseWithEtagBuilder + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + etag: str = proto.Field( + proto.STRING, + number=3123477, + optional=True, + ) + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence["StoragePool"] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message="StoragePool", + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: "Warning" = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message="Warning", + ) + + +class StoragePoolListDisks(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + etag (str): + + This field is a member of `oneof`_ ``_etag``. + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.StoragePoolDisk]): + A list of StoragePoolDisk resources. + kind (str): + [Output Only] Type of resource. Always + compute#storagePoolListDisks for lists of disks in a + storagePool. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. 
+ + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + [Output Only] Unreachable resources. end_interface: + MixerListResponseWithEtagBuilder + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + etag: str = proto.Field( + proto.STRING, + number=3123477, + optional=True, + ) + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence["StoragePoolDisk"] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message="StoragePoolDisk", + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: "Warning" = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message="Warning", + ) + + +class StoragePoolResourceStatus(proto.Message): + r"""[Output Only] Contains output only fields. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk_count (int): + [Output Only] Number of disks used. + + This field is a member of `oneof`_ ``_disk_count``. + last_resize_timestamp (str): + [Output Only] Timestamp of the last successful resize in + RFC3339 text format. + + This field is a member of `oneof`_ ``_last_resize_timestamp``. + max_total_provisioned_disk_capacity_gb (int): + [Output Only] Maximum allowed aggregate disk size in + gigabytes. + + This field is a member of `oneof`_ ``_max_total_provisioned_disk_capacity_gb``. 
+ pool_used_capacity_bytes (int): + [Output Only] Space used by data stored in disks within the + storage pool (in bytes). This will reflect the total number + of bytes written to the disks in the pool, in contrast to + the capacity of those disks. + + This field is a member of `oneof`_ ``_pool_used_capacity_bytes``. + pool_used_iops (int): + Sum of all the disks' provisioned IOPS, minus + some amount that is allowed per disk that is not + counted towards pool's IOPS capacity. + + This field is a member of `oneof`_ ``_pool_used_iops``. + pool_used_throughput (int): + [Output Only] Sum of all the disks' provisioned throughput + in MB/s. + + This field is a member of `oneof`_ ``_pool_used_throughput``. + pool_user_written_bytes (int): + [Output Only] Amount of data written into the pool, before + it is compacted. + + This field is a member of `oneof`_ ``_pool_user_written_bytes``. + total_provisioned_disk_capacity_gb (int): + [Output Only] Sum of all the capacity provisioned in disks + in this storage pool. A disk's provisioned capacity is the + same as its total capacity. + + This field is a member of `oneof`_ ``_total_provisioned_disk_capacity_gb``. + total_provisioned_disk_iops (int): + [Output Only] Sum of all the disks' provisioned IOPS. + + This field is a member of `oneof`_ ``_total_provisioned_disk_iops``. + total_provisioned_disk_throughput (int): + [Output Only] Sum of all the disks' provisioned throughput + in MB/s, minus some amount that is allowed per disk that is + not counted towards pool's throughput capacity. + + This field is a member of `oneof`_ ``_total_provisioned_disk_throughput``. 
+ """ + + disk_count: int = proto.Field( + proto.INT64, + number=182933485, + optional=True, + ) + last_resize_timestamp: str = proto.Field( + proto.STRING, + number=500825556, + optional=True, + ) + max_total_provisioned_disk_capacity_gb: int = proto.Field( + proto.INT64, + number=165818207, + optional=True, + ) + pool_used_capacity_bytes: int = proto.Field( + proto.INT64, + number=510407877, + optional=True, + ) + pool_used_iops: int = proto.Field( + proto.INT64, + number=99558536, + optional=True, + ) + pool_used_throughput: int = proto.Field( + proto.INT64, + number=206130633, + optional=True, + ) + pool_user_written_bytes: int = proto.Field( + proto.INT64, + number=228964050, + optional=True, + ) + total_provisioned_disk_capacity_gb: int = proto.Field( + proto.INT64, + number=520930980, + optional=True, + ) + total_provisioned_disk_iops: int = proto.Field( + proto.INT64, + number=32812549, + optional=True, + ) + total_provisioned_disk_throughput: int = proto.Field( + proto.INT64, + number=447677830, + optional=True, + ) + + +class StoragePoolType(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + deprecated (google.cloud.compute_v1.types.DeprecationStatus): + [Output Only] The deprecation status associated with this + storage pool type. + + This field is a member of `oneof`_ ``_deprecated``. + description (str): + [Output Only] An optional description of this resource. + + This field is a member of `oneof`_ ``_description``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + [Output Only] Type of the resource. Always + compute#storagePoolType for storage pool types. 
+ + This field is a member of `oneof`_ ``_kind``. + max_pool_provisioned_capacity_gb (int): + [Output Only] Maximum storage pool size in GB. + + This field is a member of `oneof`_ ``_max_pool_provisioned_capacity_gb``. + max_pool_provisioned_iops (int): + [Output Only] Maximum provisioned IOPS. + + This field is a member of `oneof`_ ``_max_pool_provisioned_iops``. + max_pool_provisioned_throughput (int): + [Output Only] Maximum provisioned throughput. + + This field is a member of `oneof`_ ``_max_pool_provisioned_throughput``. + min_pool_provisioned_capacity_gb (int): + [Output Only] Minimum storage pool size in GB. + + This field is a member of `oneof`_ ``_min_pool_provisioned_capacity_gb``. + min_pool_provisioned_iops (int): + [Output Only] Minimum provisioned IOPS. + + This field is a member of `oneof`_ ``_min_pool_provisioned_iops``. + min_pool_provisioned_throughput (int): + [Output Only] Minimum provisioned throughput. + + This field is a member of `oneof`_ ``_min_pool_provisioned_throughput``. + min_size_gb (int): + [Deprecated] This field is deprecated. Use + minPoolProvisionedCapacityGb instead. + + This field is a member of `oneof`_ ``_min_size_gb``. + name (str): + [Output Only] Name of the resource. + + This field is a member of `oneof`_ ``_name``. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + self_link_with_id (str): + [Output Only] Server-defined URL for this resource with the + resource id. + + This field is a member of `oneof`_ ``_self_link_with_id``. + supported_disk_types (MutableSequence[str]): + [Output Only] The list of disk types supported in this + storage pool type. + zone (str): + [Output Only] URL of the zone where the storage pool type + resides. You must specify this field as part of the HTTP + request URL. It is not settable as a field in the request + body. + + This field is a member of `oneof`_ ``_zone``. 
+ """ + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + deprecated: "DeprecationStatus" = proto.Field( + proto.MESSAGE, + number=515138995, + optional=True, + message="DeprecationStatus", + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + max_pool_provisioned_capacity_gb: int = proto.Field( + proto.INT64, + number=182139085, + optional=True, + ) + max_pool_provisioned_iops: int = proto.Field( + proto.INT64, + number=515270652, + optional=True, + ) + max_pool_provisioned_throughput: int = proto.Field( + proto.INT64, + number=228928061, + optional=True, + ) + min_pool_provisioned_capacity_gb: int = proto.Field( + proto.INT64, + number=191022751, + optional=True, + ) + min_pool_provisioned_iops: int = proto.Field( + proto.INT64, + number=416473706, + optional=True, + ) + min_pool_provisioned_throughput: int = proto.Field( + proto.INT64, + number=367761963, + optional=True, + ) + min_size_gb: int = proto.Field( + proto.INT64, + number=385278188, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + self_link_with_id: str = proto.Field( + proto.STRING, + number=44520962, + optional=True, + ) + supported_disk_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=150587272, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + optional=True, + ) + + +class StoragePoolTypeAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. 
+ + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.StoragePoolTypesScopedList]): + A list of StoragePoolTypesScopedList + resources. + kind (str): + [Output Only] Type of resource. Always + compute#storagePoolTypeAggregatedList . + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, "StoragePoolTypesScopedList"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message="StoragePoolTypesScopedList", + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: "Warning" = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message="Warning", + ) + + +class StoragePoolTypeList(proto.Message): + r"""Contains a list of storage pool types. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.StoragePoolType]): + A list of StoragePoolType resources. + kind (str): + [Output Only] Type of resource. Always + compute#storagePoolTypeList for storage pool types. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger than maxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence["StoragePoolType"] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message="StoragePoolType", + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: "Warning" = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message="Warning", + ) + + +class StoragePoolTypesScopedList(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + storage_pool_types (MutableSequence[google.cloud.compute_v1.types.StoragePoolType]): + [Output Only] A list of storage pool types contained in this + scope. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning which replaces the list + of storage pool types when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + storage_pool_types: MutableSequence["StoragePoolType"] = proto.RepeatedField( + proto.MESSAGE, + number=276043482, + message="StoragePoolType", + ) + warning: "Warning" = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message="Warning", + ) + + +class StoragePoolsScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + storage_pools (MutableSequence[google.cloud.compute_v1.types.StoragePool]): + [Output Only] A list of storage pool contained in this + scope. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning which replaces the list + of storage pool when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + storage_pools: MutableSequence["StoragePool"] = proto.RepeatedField( + proto.MESSAGE, + number=437258515, + message="StoragePool", + ) + warning: "Warning" = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message="Warning", + ) + + +class Subnetwork(proto.Message): + r"""Represents a Subnetwork resource. A subnetwork (also known as + a subnet) is a logical partition of a Virtual Private Cloud + network with one primary IP range and zero or more secondary IP + ranges. For more information, read Virtual Private Cloud (VPC) + Network. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + Provide this property when you create the + resource. This field can be set only at resource + creation time. + + This field is a member of `oneof`_ ``_description``. + enable_flow_logs (bool): + Whether to enable flow logging for this subnetwork. If this + field is not explicitly set, it will not appear in get + listings. If not set the default behavior is determined by + the org policy, if there is no org policy specified, then it + will default to disabled. This field isn't supported if the + subnet purpose field is set to REGIONAL_MANAGED_PROXY. + + This field is a member of `oneof`_ ``_enable_flow_logs``. + external_ipv6_prefix (str): + The external IPv6 address range that is owned + by this subnetwork. + + This field is a member of `oneof`_ ``_external_ipv6_prefix``. + fingerprint (str): + Fingerprint of this resource. A hash of the + contents stored in this object. This field is + used in optimistic locking. This field will be + ignored when inserting a Subnetwork. An + up-to-date fingerprint must be provided in order + to update the Subnetwork, otherwise the request + will fail with error 412 conditionNotMet. To see + the latest fingerprint, make a get() request to + retrieve a Subnetwork. + + This field is a member of `oneof`_ ``_fingerprint``. + gateway_address (str): + [Output Only] The gateway address for default routes to + reach destination addresses outside this subnetwork. + + This field is a member of `oneof`_ ``_gateway_address``. + id (int): + [Output Only] The unique identifier for the resource. This + identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. 
+ internal_ipv6_prefix (str): + [Output Only] The internal IPv6 address range that is + assigned to this subnetwork. + + This field is a member of `oneof`_ ``_internal_ipv6_prefix``. + ip_cidr_range (str): + The range of internal addresses that are + owned by this subnetwork. Provide this property + when you create the subnetwork. For example, + 10.0.0.0/8 or 100.64.0.0/10. Ranges must be + unique and non-overlapping within a network. + Only IPv4 is supported. This field is set at + resource creation time. The range can be any + range listed in the Valid ranges list. The range + can be expanded after creation using + expandIpCidrRange. + + This field is a member of `oneof`_ ``_ip_cidr_range``. + ipv6_access_type (str): + The access type of IPv6 address this subnet holds. It's + immutable and can only be specified during creation or the + first time the subnet is updated into IPV4_IPV6 dual stack. + Check the Ipv6AccessType enum for the list of possible + values. + + This field is a member of `oneof`_ ``_ipv6_access_type``. + ipv6_cidr_range (str): + [Output Only] This field is for internal use. + + This field is a member of `oneof`_ ``_ipv6_cidr_range``. + kind (str): + [Output Only] Type of the resource. Always + compute#subnetwork for Subnetwork resources. + + This field is a member of `oneof`_ ``_kind``. + log_config (google.cloud.compute_v1.types.SubnetworkLogConfig): + This field denotes the VPC flow logging + options for this subnetwork. If logging is + enabled, logs are exported to Cloud Logging. + + This field is a member of `oneof`_ ``_log_config``. + name (str): + The name of the resource, provided by the client when + initially creating the resource. The name must be 1-63 + characters long, and comply with RFC1035. 
Specifically, the + name must be 1-63 characters long and match the regular + expression ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the + first character must be a lowercase letter, and all + following characters must be a dash, lowercase letter, or + digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + network (str): + The URL of the network to which this + subnetwork belongs, provided by the client when + initially creating the subnetwork. This field + can be set only at resource creation time. + + This field is a member of `oneof`_ ``_network``. + private_ip_google_access (bool): + Whether the VMs in this subnet can access + Google services without assigned external IP + addresses. This field can be both set at + resource creation time and updated using + setPrivateIpGoogleAccess. + + This field is a member of `oneof`_ ``_private_ip_google_access``. + private_ipv6_google_access (str): + This field is for internal use. This field + can be both set at resource creation time and + updated using patch. Check the + PrivateIpv6GoogleAccess enum for the list of + possible values. + + This field is a member of `oneof`_ ``_private_ipv6_google_access``. + purpose (str): + The purpose of the resource. This field can be either + PRIVATE, GLOBAL_MANAGED_PROXY, REGIONAL_MANAGED_PROXY, + PRIVATE_SERVICE_CONNECT, or PRIVATE is the default purpose + for user-created subnets or subnets that are automatically + created in auto mode networks. Subnets with purpose set to + GLOBAL_MANAGED_PROXY or REGIONAL_MANAGED_PROXY are + user-created subnetworks that are reserved for Envoy-based + load balancers. A subnet with purpose set to + PRIVATE_SERVICE_CONNECT is used to publish services using + Private Service Connect. If unspecified, the subnet purpose + defaults to PRIVATE. The enableFlowLogs field isn't + supported if the subnet purpose field is set to + GLOBAL_MANAGED_PROXY or REGIONAL_MANAGED_PROXY. 
Check the + Purpose enum for the list of possible values. + + This field is a member of `oneof`_ ``_purpose``. + region (str): + URL of the region where the Subnetwork + resides. This field can be set only at resource + creation time. + + This field is a member of `oneof`_ ``_region``. + reserved_internal_range (str): + The URL of the reserved internal range. + + This field is a member of `oneof`_ ``_reserved_internal_range``. + role (str): + The role of subnetwork. Currently, this field is only used + when purpose is set to GLOBAL_MANAGED_PROXY or + REGIONAL_MANAGED_PROXY. The value can be set to ACTIVE or + BACKUP. An ACTIVE subnetwork is one that is currently being + used for Envoy-based load balancers in a region. A BACKUP + subnetwork is one that is ready to be promoted to ACTIVE or + is currently draining. This field can be updated with a + patch request. Check the Role enum for the list of possible + values. + + This field is a member of `oneof`_ ``_role``. + secondary_ip_ranges (MutableSequence[google.cloud.compute_v1.types.SubnetworkSecondaryRange]): + An array of configurations for secondary IP + ranges for VM instances contained in this + subnetwork. The primary IP of such VM must + belong to the primary ipCidrRange of the + subnetwork. The alias IPs may belong to either + primary or secondary ranges. This field can be + updated with a patch request. + self_link (str): + [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. + stack_type (str): + The stack type for the subnet. If set to IPV4_ONLY, new VMs + in the subnet are assigned IPv4 addresses only. If set to + IPV4_IPV6, new VMs in the subnet can be assigned both IPv4 + and IPv6 addresses. If not specified, IPV4_ONLY is used. + This field can be both set at resource creation time and + updated using patch. Check the StackType enum for the list + of possible values. + + This field is a member of `oneof`_ ``_stack_type``. 
+ state (str): + [Output Only] The state of the subnetwork, which can be one + of the following values: READY: Subnetwork is created and + ready to use DRAINING: only applicable to subnetworks that + have the purpose set to INTERNAL_HTTPS_LOAD_BALANCER and + indicates that connections to the load balancer are being + drained. A subnetwork that is draining cannot be used or + modified until it reaches a status of READY Check the State + enum for the list of possible values. + + This field is a member of `oneof`_ ``_state``. + """ + + class Ipv6AccessType(proto.Enum): + r"""The access type of IPv6 address this subnet holds. It's immutable + and can only be specified during creation or the first time the + subnet is updated into IPV4_IPV6 dual stack. + + Values: + UNDEFINED_IPV6_ACCESS_TYPE (0): + A value indicating that the enum field is not + set. + EXTERNAL (35607499): + VMs on this subnet will be assigned IPv6 + addresses that are accessible via the Internet, + as well as the VPC network. + INTERNAL (279295677): + VMs on this subnet will be assigned IPv6 + addresses that are only accessible over the VPC + network. + UNSPECIFIED_IPV6_ACCESS_TYPE (313080613): + No description available. + """ + UNDEFINED_IPV6_ACCESS_TYPE = 0 + EXTERNAL = 35607499 + INTERNAL = 279295677 + UNSPECIFIED_IPV6_ACCESS_TYPE = 313080613 + + class PrivateIpv6GoogleAccess(proto.Enum): + r"""This field is for internal use. This field can be both set at + resource creation time and updated using patch. + + Values: + UNDEFINED_PRIVATE_IPV6_GOOGLE_ACCESS (0): + A value indicating that the enum field is not + set. + DISABLE_GOOGLE_ACCESS (450958579): + Disable private IPv6 access to/from Google + services. + ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE (427975994): + Bidirectional private IPv6 access to/from + Google services. + ENABLE_OUTBOUND_VM_ACCESS_TO_GOOGLE (288210263): + Outbound private IPv6 access from VMs in this + subnet to Google services. 
+ """ + UNDEFINED_PRIVATE_IPV6_GOOGLE_ACCESS = 0 + DISABLE_GOOGLE_ACCESS = 450958579 + ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE = 427975994 + ENABLE_OUTBOUND_VM_ACCESS_TO_GOOGLE = 288210263 + + class Purpose(proto.Enum): + r"""The purpose of the resource. This field can be either PRIVATE, + GLOBAL_MANAGED_PROXY, REGIONAL_MANAGED_PROXY, + PRIVATE_SERVICE_CONNECT, or PRIVATE is the default purpose for + user-created subnets or subnets that are automatically created in + auto mode networks. Subnets with purpose set to GLOBAL_MANAGED_PROXY + or REGIONAL_MANAGED_PROXY are user-created subnetworks that are + reserved for Envoy-based load balancers. A subnet with purpose set + to PRIVATE_SERVICE_CONNECT is used to publish services using Private + Service Connect. If unspecified, the subnet purpose defaults to + PRIVATE. The enableFlowLogs field isn't supported if the subnet + purpose field is set to GLOBAL_MANAGED_PROXY or + REGIONAL_MANAGED_PROXY. + + Values: + UNDEFINED_PURPOSE (0): + A value indicating that the enum field is not + set. + GLOBAL_MANAGED_PROXY (236463602): + Subnet reserved for Global Envoy-based Load + Balancing. + INTERNAL_HTTPS_LOAD_BALANCER (248748889): + Subnet reserved for Internal HTTP(S) Load Balancing. This is + a legacy purpose, please use REGIONAL_MANAGED_PROXY instead. + PRIVATE (403485027): + Regular user created or automatically created + subnet. + PRIVATE_NAT (367764517): + Subnetwork used as source range for Private + NAT Gateways. + PRIVATE_RFC_1918 (254902107): + Regular user created or automatically created + subnet. + PRIVATE_SERVICE_CONNECT (48134724): + Subnetworks created for Private Service + Connect in the producer network. + REGIONAL_MANAGED_PROXY (153049966): + Subnetwork used for Regional Envoy-based Load + Balancing. 
+ """ + UNDEFINED_PURPOSE = 0 + GLOBAL_MANAGED_PROXY = 236463602 + INTERNAL_HTTPS_LOAD_BALANCER = 248748889 + PRIVATE = 403485027 + PRIVATE_NAT = 367764517 + PRIVATE_RFC_1918 = 254902107 + PRIVATE_SERVICE_CONNECT = 48134724 + REGIONAL_MANAGED_PROXY = 153049966 + + class Role(proto.Enum): + r"""The role of subnetwork. Currently, this field is only used when + purpose is set to GLOBAL_MANAGED_PROXY or REGIONAL_MANAGED_PROXY. + The value can be set to ACTIVE or BACKUP. An ACTIVE subnetwork is + one that is currently being used for Envoy-based load balancers in a + region. A BACKUP subnetwork is one that is ready to be promoted to + ACTIVE or is currently draining. This field can be updated with a + patch request. + + Values: + UNDEFINED_ROLE (0): + A value indicating that the enum field is not + set. + ACTIVE (314733318): + The ACTIVE subnet that is currently used. + BACKUP (341010882): + The BACKUP subnet that could be promoted to + ACTIVE. + """ + UNDEFINED_ROLE = 0 + ACTIVE = 314733318 + BACKUP = 341010882 + + class StackType(proto.Enum): + r"""The stack type for the subnet. If set to IPV4_ONLY, new VMs in the + subnet are assigned IPv4 addresses only. If set to IPV4_IPV6, new + VMs in the subnet can be assigned both IPv4 and IPv6 addresses. If + not specified, IPV4_ONLY is used. This field can be both set at + resource creation time and updated using patch. + + Values: + UNDEFINED_STACK_TYPE (0): + A value indicating that the enum field is not + set. + IPV4_IPV6 (22197249): + New VMs in this subnet can have both IPv4 and + IPv6 addresses. + IPV4_ONLY (22373798): + New VMs in this subnet will only be assigned + IPv4 addresses. + UNSPECIFIED_STACK_TYPE (298084569): + No description available. 
+ """ + UNDEFINED_STACK_TYPE = 0 + IPV4_IPV6 = 22197249 + IPV4_ONLY = 22373798 + UNSPECIFIED_STACK_TYPE = 298084569 + + class State(proto.Enum): + r"""[Output Only] The state of the subnetwork, which can be one of the + following values: READY: Subnetwork is created and ready to use + DRAINING: only applicable to subnetworks that have the purpose set + to INTERNAL_HTTPS_LOAD_BALANCER and indicates that connections to + the load balancer are being drained. A subnetwork that is draining + cannot be used or modified until it reaches a status of READY + + Values: + UNDEFINED_STATE (0): + A value indicating that the enum field is not + set. + DRAINING (480455402): + Subnetwork is being drained. + READY (77848963): + Subnetwork is ready for use. + """ + UNDEFINED_STATE = 0 + DRAINING = 480455402 + READY = 77848963 + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + enable_flow_logs: bool = proto.Field( + proto.BOOL, + number=151544420, + optional=True, + ) + external_ipv6_prefix: str = proto.Field( + proto.STRING, + number=139299190, + optional=True, + ) + fingerprint: str = proto.Field( + proto.STRING, + number=234678500, + optional=True, + ) + gateway_address: str = proto.Field( + proto.STRING, + number=459867385, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + internal_ipv6_prefix: str = proto.Field( + proto.STRING, + number=506270056, + optional=True, + ) + ip_cidr_range: str = proto.Field( + proto.STRING, + number=98117322, + optional=True, + ) + ipv6_access_type: str = proto.Field( + proto.STRING, + number=504658653, + optional=True, + ) + ipv6_cidr_range: str = proto.Field( + proto.STRING, + number=273141258, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + log_config: "SubnetworkLogConfig" = proto.Field( + 
proto.MESSAGE, + number=351299741, + optional=True, + message="SubnetworkLogConfig", + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + network: str = proto.Field( + proto.STRING, + number=232872494, + optional=True, + ) + private_ip_google_access: bool = proto.Field( + proto.BOOL, + number=421491790, + optional=True, + ) + private_ipv6_google_access: str = proto.Field( + proto.STRING, + number=48277006, + optional=True, + ) + purpose: str = proto.Field( + proto.STRING, + number=316407070, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + reserved_internal_range: str = proto.Field( + proto.STRING, + number=286248754, + optional=True, + ) + role: str = proto.Field( + proto.STRING, + number=3506294, + optional=True, + ) + secondary_ip_ranges: MutableSequence[ + "SubnetworkSecondaryRange" + ] = proto.RepeatedField( + proto.MESSAGE, + number=136658915, + message="SubnetworkSecondaryRange", + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + stack_type: str = proto.Field( + proto.STRING, + number=425908881, + optional=True, + ) + state: str = proto.Field( + proto.STRING, + number=109757585, + optional=True, + ) + + +class SubnetworkAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.SubnetworksScopedList]): + A list of SubnetworksScopedList resources. + kind (str): + [Output Only] Type of resource. Always + compute#subnetworkAggregatedList for aggregated lists of + subnetworks. This field is a member of `oneof`_ ``_kind``. 
next_page_token (str): @@ -101587,8 +105263,249 @@ class TestIamPermissionsNodeTemplateRequest(proto.Message): ) -class TestIamPermissionsPacketMirroringRequest(proto.Message): - r"""A request message for PacketMirrorings.TestIamPermissions. +class TestIamPermissionsPacketMirroringRequest(proto.Message): + r"""A request message for PacketMirrorings.TestIamPermissions. + See the method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: "TestPermissionsRequest" = proto.Field( + proto.MESSAGE, + number=439214758, + message="TestPermissionsRequest", + ) + + +class TestIamPermissionsRegionBackendServiceRequest(proto.Message): + r"""A request message for + RegionBackendServices.TestIamPermissions. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. 
+ test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: "TestPermissionsRequest" = proto.Field( + proto.MESSAGE, + number=439214758, + message="TestPermissionsRequest", + ) + + +class TestIamPermissionsRegionDiskRequest(proto.Message): + r"""A request message for RegionDisks.TestIamPermissions. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: "TestPermissionsRequest" = proto.Field( + proto.MESSAGE, + number=439214758, + message="TestPermissionsRequest", + ) + + +class TestIamPermissionsRegionInstantSnapshotRequest(proto.Message): + r"""A request message for + RegionInstantSnapshots.TestIamPermissions. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. 
+ test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: "TestPermissionsRequest" = proto.Field( + proto.MESSAGE, + number=439214758, + message="TestPermissionsRequest", + ) + + +class TestIamPermissionsRegionNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for + RegionNetworkFirewallPolicies.TestIamPermissions. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: "TestPermissionsRequest" = proto.Field( + proto.MESSAGE, + number=439214758, + message="TestPermissionsRequest", + ) + + +class TestIamPermissionsReservationRequest(proto.Message): + r"""A request message for Reservations.TestIamPermissions. See + the method description for details. + + Attributes: + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + zone (str): + The name of the zone for this request. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: "TestPermissionsRequest" = proto.Field( + proto.MESSAGE, + number=439214758, + message="TestPermissionsRequest", + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class TestIamPermissionsResourcePolicyRequest(proto.Message): + r"""A request message for ResourcePolicies.TestIamPermissions. + See the method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: "TestPermissionsRequest" = proto.Field( + proto.MESSAGE, + number=439214758, + message="TestPermissionsRequest", + ) + + +class TestIamPermissionsServiceAttachmentRequest(proto.Message): + r"""A request message for ServiceAttachments.TestIamPermissions. See the method description for details. Attributes: @@ -101621,50 +105538,13 @@ class TestIamPermissionsPacketMirroringRequest(proto.Message): ) -class TestIamPermissionsRegionBackendServiceRequest(proto.Message): - r"""A request message for - RegionBackendServices.TestIamPermissions. See the method - description for details. - - Attributes: - project (str): - Project ID for this request. - region (str): - The name of the region for this request. - resource (str): - Name or id of the resource for this request. 
- test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): - The body resource for this request - """ - - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) - resource: str = proto.Field( - proto.STRING, - number=195806222, - ) - test_permissions_request_resource: "TestPermissionsRequest" = proto.Field( - proto.MESSAGE, - number=439214758, - message="TestPermissionsRequest", - ) - - -class TestIamPermissionsRegionDiskRequest(proto.Message): - r"""A request message for RegionDisks.TestIamPermissions. See the +class TestIamPermissionsSnapshotRequest(proto.Message): + r"""A request message for Snapshots.TestIamPermissions. See the method description for details. Attributes: project (str): Project ID for this request. - region (str): - The name of the region for this request. resource (str): Name or id of the resource for this request. test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): @@ -101675,10 +105555,6 @@ class TestIamPermissionsRegionDiskRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) resource: str = proto.Field( proto.STRING, number=195806222, @@ -101690,78 +105566,8 @@ class TestIamPermissionsRegionDiskRequest(proto.Message): ) -class TestIamPermissionsRegionInstantSnapshotRequest(proto.Message): - r"""A request message for - RegionInstantSnapshots.TestIamPermissions. See the method - description for details. - - Attributes: - project (str): - Project ID for this request. - region (str): - The name of the region for this request. - resource (str): - Name or id of the resource for this request. 
- test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): - The body resource for this request - """ - - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) - resource: str = proto.Field( - proto.STRING, - number=195806222, - ) - test_permissions_request_resource: "TestPermissionsRequest" = proto.Field( - proto.MESSAGE, - number=439214758, - message="TestPermissionsRequest", - ) - - -class TestIamPermissionsRegionNetworkFirewallPolicyRequest(proto.Message): - r"""A request message for - RegionNetworkFirewallPolicies.TestIamPermissions. See the method - description for details. - - Attributes: - project (str): - Project ID for this request. - region (str): - The name of the region for this request. - resource (str): - Name or id of the resource for this request. - test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): - The body resource for this request - """ - - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) - resource: str = proto.Field( - proto.STRING, - number=195806222, - ) - test_permissions_request_resource: "TestPermissionsRequest" = proto.Field( - proto.MESSAGE, - number=439214758, - message="TestPermissionsRequest", - ) - - -class TestIamPermissionsReservationRequest(proto.Message): - r"""A request message for Reservations.TestIamPermissions. See +class TestIamPermissionsStoragePoolRequest(proto.Message): + r"""A request message for StoragePools.TestIamPermissions. See the method description for details. Attributes: @@ -101794,102 +105600,6 @@ class TestIamPermissionsReservationRequest(proto.Message): ) -class TestIamPermissionsResourcePolicyRequest(proto.Message): - r"""A request message for ResourcePolicies.TestIamPermissions. - See the method description for details. 
- - Attributes: - project (str): - Project ID for this request. - region (str): - The name of the region for this request. - resource (str): - Name or id of the resource for this request. - test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): - The body resource for this request - """ - - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) - resource: str = proto.Field( - proto.STRING, - number=195806222, - ) - test_permissions_request_resource: "TestPermissionsRequest" = proto.Field( - proto.MESSAGE, - number=439214758, - message="TestPermissionsRequest", - ) - - -class TestIamPermissionsServiceAttachmentRequest(proto.Message): - r"""A request message for ServiceAttachments.TestIamPermissions. - See the method description for details. - - Attributes: - project (str): - Project ID for this request. - region (str): - The name of the region for this request. - resource (str): - Name or id of the resource for this request. - test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): - The body resource for this request - """ - - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) - resource: str = proto.Field( - proto.STRING, - number=195806222, - ) - test_permissions_request_resource: "TestPermissionsRequest" = proto.Field( - proto.MESSAGE, - number=439214758, - message="TestPermissionsRequest", - ) - - -class TestIamPermissionsSnapshotRequest(proto.Message): - r"""A request message for Snapshots.TestIamPermissions. See the - method description for details. - - Attributes: - project (str): - Project ID for this request. - resource (str): - Name or id of the resource for this request. 
- test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): - The body resource for this request - """ - - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - resource: str = proto.Field( - proto.STRING, - number=195806222, - ) - test_permissions_request_resource: "TestPermissionsRequest" = proto.Field( - proto.MESSAGE, - number=439214758, - message="TestPermissionsRequest", - ) - - class TestIamPermissionsSubnetworkRequest(proto.Message): r"""A request message for Subnetworks.TestIamPermissions. See the method description for details. @@ -103573,6 +107283,76 @@ class UpdateShieldedInstanceConfigInstanceRequest(proto.Message): ) +class UpdateStoragePoolRequest(proto.Message): + r"""A request message for StoragePools.Update. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. For example, consider a situation + where you make an initial request and the + request times out. If you make the request again + with the same request ID, the server can check + if original operation with the same request ID + was received, and if so, will ignore the second + request. This prevents clients from accidentally + creating duplicate commitments. The request ID + must be a valid UUID with the exception that + zero UUID is not supported ( + 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + storage_pool (str): + The storagePool name for this request. 
+ storage_pool_resource (google.cloud.compute_v1.types.StoragePool): + The body resource for this request + update_mask (str): + update_mask indicates fields to be updated as part of this + request. + + This field is a member of `oneof`_ ``_update_mask``. + zone (str): + The name of the zone for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + storage_pool: str = proto.Field( + proto.STRING, + number=360473440, + ) + storage_pool_resource: "StoragePool" = proto.Field( + proto.MESSAGE, + number=157179405, + message="StoragePool", + ) + update_mask: str = proto.Field( + proto.STRING, + number=500079778, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + class UpdateUrlMapRequest(proto.Message): r"""A request message for UrlMaps.Update. See the method description for details. @@ -103688,10 +107468,10 @@ class UrlMap(proto.Message): cannot contain any weightedBackendServices. Conversely, if routeAction specifies any weightedBackendServices, service must not be - specified. Only one of defaultService, - defaultUrlRedirect , or - defaultRouteAction.weightedBackendService must - be set. defaultService has no effect when the + specified. If defaultService is specified, then + set either defaultUrlRedirect , or + defaultRouteAction.weightedBackendService Don't + set both. defaultService has no effect when the URL map is bound to a target gRPC proxy that has the validateForProxyless field set to true. 
diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instance_group_manager_resize_requests_cancel_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instance_group_manager_resize_requests_cancel_sync.py new file mode 100644 index 000000000000..e8a149d6b3e7 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instance_group_manager_resize_requests_cancel_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Cancel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagerResizeRequests_Cancel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_cancel(): + # Create a client + client = compute_v1.InstanceGroupManagerResizeRequestsClient() + + # Initialize request argument(s) + request = compute_v1.CancelInstanceGroupManagerResizeRequestRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + resize_request="resize_request_value", + zone="zone_value", + ) + + # Make the request + response = client.cancel(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroupManagerResizeRequests_Cancel_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instance_group_manager_resize_requests_delete_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instance_group_manager_resize_requests_delete_sync.py new file mode 100644 index 000000000000..5b9b46ce7979 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instance_group_manager_resize_requests_delete_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagerResizeRequests_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.InstanceGroupManagerResizeRequestsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInstanceGroupManagerResizeRequestRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + resize_request="resize_request_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroupManagerResizeRequests_Delete_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instance_group_manager_resize_requests_get_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instance_group_manager_resize_requests_get_sync.py new file mode 100644 index 000000000000..7d63555cd6e6 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instance_group_manager_resize_requests_get_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not 
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagerResizeRequests_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.InstanceGroupManagerResizeRequestsClient() + + # Initialize request argument(s) + request = compute_v1.GetInstanceGroupManagerResizeRequestRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + resize_request="resize_request_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroupManagerResizeRequests_Get_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instance_group_manager_resize_requests_insert_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instance_group_manager_resize_requests_insert_sync.py new file mode 100644 index 000000000000..24e7ae7d9205 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instance_group_manager_resize_requests_insert_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagerResizeRequests_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.InstanceGroupManagerResizeRequestsClient() + + # Initialize request argument(s) + request = compute_v1.InsertInstanceGroupManagerResizeRequestRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceGroupManagerResizeRequests_Insert_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instance_group_manager_resize_requests_list_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instance_group_manager_resize_requests_list_sync.py new file mode 100644 index 000000000000..2c2d357fd517 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instance_group_manager_resize_requests_list_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with 
the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceGroupManagerResizeRequests_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.InstanceGroupManagerResizeRequestsClient() + + # Initialize request argument(s) + request = compute_v1.ListInstanceGroupManagerResizeRequestsRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_InstanceGroupManagerResizeRequests_List_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instance_settings_service_get_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instance_settings_service_get_sync.py new file mode 100644 index 000000000000..9687fff65387 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instance_settings_service_get_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceSettingsService_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.InstanceSettingsServiceClient() + + # Initialize request argument(s) + request = compute_v1.GetInstanceSettingRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceSettingsService_Get_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instance_settings_service_patch_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instance_settings_service_patch_sync.py new file mode 100644 index 000000000000..a6e849239e1e --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instance_settings_service_patch_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstanceSettingsService_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.InstanceSettingsServiceClient() + + # Initialize request argument(s) + request = compute_v1.PatchInstanceSettingRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_InstanceSettingsService_Patch_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_node_groups_perform_maintenance_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_node_groups_perform_maintenance_sync.py new file mode 100644 index 000000000000..9e0d1d85e1b5 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_node_groups_perform_maintenance_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for PerformMaintenance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_NodeGroups_PerformMaintenance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_perform_maintenance(): + # Create a client + client = compute_v1.NodeGroupsClient() + + # Initialize request argument(s) + request = compute_v1.PerformMaintenanceNodeGroupRequest( + node_group="node_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.perform_maintenance(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_NodeGroups_PerformMaintenance_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pool_types_aggregated_list_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pool_types_aggregated_list_sync.py new file mode 100644 index 000000000000..f72d2e7c4fc9 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pool_types_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_StoragePoolTypes_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.StoragePoolTypesClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListStoragePoolTypesRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_StoragePoolTypes_AggregatedList_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pool_types_get_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pool_types_get_sync.py new file mode 100644 index 000000000000..20a4b0bd83a8 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pool_types_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_StoragePoolTypes_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.StoragePoolTypesClient() + + # Initialize request argument(s) + request = compute_v1.GetStoragePoolTypeRequest( + project="project_value", + storage_pool_type="storage_pool_type_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_StoragePoolTypes_Get_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pool_types_list_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pool_types_list_sync.py new file mode 100644 index 000000000000..94b2ddda4ef9 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pool_types_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_StoragePoolTypes_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.StoragePoolTypesClient() + + # Initialize request argument(s) + request = compute_v1.ListStoragePoolTypesRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_StoragePoolTypes_List_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_aggregated_list_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_aggregated_list_sync.py new file mode 100644 index 000000000000..b5157795202e --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_aggregated_list_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_StoragePools_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.StoragePoolsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListStoragePoolsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_StoragePools_AggregatedList_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_delete_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_delete_sync.py new file mode 100644 index 000000000000..d8d19295fce4 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_delete_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_StoragePools_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.StoragePoolsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteStoragePoolRequest( + project="project_value", + storage_pool="storage_pool_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_StoragePools_Delete_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_get_iam_policy_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_get_iam_policy_sync.py new file mode 100644 index 000000000000..b8bed19f18a6 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_get_iam_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_StoragePools_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.StoragePoolsClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyStoragePoolRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_StoragePools_GetIamPolicy_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_get_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_get_sync.py new file mode 100644 index 000000000000..c706e2a8bbb3 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_StoragePools_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.StoragePoolsClient() + + # Initialize request argument(s) + request = compute_v1.GetStoragePoolRequest( + project="project_value", + storage_pool="storage_pool_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_StoragePools_Get_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_insert_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_insert_sync.py new file mode 100644 index 000000000000..2d13e1b9301d --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_insert_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_StoragePools_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.StoragePoolsClient() + + # Initialize request argument(s) + request = compute_v1.InsertStoragePoolRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_StoragePools_Insert_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_list_disks_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_list_disks_sync.py new file mode 100644 index 000000000000..09353b98cc7e --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_list_disks_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDisks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_StoragePools_ListDisks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_disks(): + # Create a client + client = compute_v1.StoragePoolsClient() + + # Initialize request argument(s) + request = compute_v1.ListDisksStoragePoolsRequest( + project="project_value", + storage_pool="storage_pool_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list_disks(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_StoragePools_ListDisks_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_list_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_list_sync.py new file mode 100644 index 000000000000..93d6f86a3449 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_StoragePools_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.StoragePoolsClient() + + # Initialize request argument(s) + request = compute_v1.ListStoragePoolsRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END compute_v1_generated_StoragePools_List_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_set_iam_policy_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_set_iam_policy_sync.py new file mode 100644 index 000000000000..325d051173b5 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_set_iam_policy_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_StoragePools_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_iam_policy(): + # Create a client + client = compute_v1.StoragePoolsClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyStoragePoolRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_StoragePools_SetIamPolicy_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_test_iam_permissions_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_test_iam_permissions_sync.py new file mode 100644 index 000000000000..1aefcc2a4193 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_StoragePools_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.StoragePoolsClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsStoragePoolRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_StoragePools_TestIamPermissions_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_update_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_update_sync.py new file mode 100644 index 000000000000..375d016d6407 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_storage_pools_update_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Update +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_StoragePools_Update_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update(): + # Create a client + client = compute_v1.StoragePoolsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateStoragePoolRequest( + project="project_value", + storage_pool="storage_pool_value", + zone="zone_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + +# [END compute_v1_generated_StoragePools_Update_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json b/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json index 4573426b0d99..8a5a8ea62778 100644 --- a/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json +++ b/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-compute", - "version": "1.18.0" + "version": "0.1.0" }, "snippets": [ { @@ -13307,22 +13307,22 @@ "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", - "shortName": "InstanceGroupManagersClient" + "fullName": "google.cloud.compute_v1.InstanceGroupManagerResizeRequestsClient", + "shortName": "InstanceGroupManagerResizeRequestsClient" }, - 
"fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.abandon_instances", + "fullName": "google.cloud.compute_v1.InstanceGroupManagerResizeRequestsClient.cancel", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers.AbandonInstances", + "fullName": "google.cloud.compute.v1.InstanceGroupManagerResizeRequests.Cancel", "service": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers", - "shortName": "InstanceGroupManagers" + "fullName": "google.cloud.compute.v1.InstanceGroupManagerResizeRequests", + "shortName": "InstanceGroupManagerResizeRequests" }, - "shortName": "AbandonInstances" + "shortName": "Cancel" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AbandonInstancesInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.CancelInstanceGroupManagerResizeRequestRequest" }, { "name": "project", @@ -13337,8 +13337,8 @@ "type": "str" }, { - "name": "instance_group_managers_abandon_instances_request_resource", - "type": "google.cloud.compute_v1.types.InstanceGroupManagersAbandonInstancesRequest" + "name": "resize_request", + "type": "str" }, { "name": "retry", @@ -13354,21 +13354,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "abandon_instances" + "shortName": "cancel" }, - "description": "Sample for AbandonInstances", - "file": "compute_v1_generated_instance_group_managers_abandon_instances_sync.py", + "description": "Sample for Cancel", + "file": "compute_v1_generated_instance_group_manager_resize_requests_cancel_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroupManagers_AbandonInstances_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagerResizeRequests_Cancel_sync", "segments": [ { - "end": 53, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 54, "start": 27, "type": "SHORT" }, @@ -13378,48 +13378,60 @@ "type": "CLIENT_INITIALIZATION" }, { - 
"end": 47, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_group_managers_abandon_instances_sync.py" + "title": "compute_v1_generated_instance_group_manager_resize_requests_cancel_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", - "shortName": "InstanceGroupManagersClient" + "fullName": "google.cloud.compute_v1.InstanceGroupManagerResizeRequestsClient", + "shortName": "InstanceGroupManagerResizeRequestsClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.aggregated_list", + "fullName": "google.cloud.compute_v1.InstanceGroupManagerResizeRequestsClient.delete", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers.AggregatedList", + "fullName": "google.cloud.compute.v1.InstanceGroupManagerResizeRequests.Delete", "service": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers", - "shortName": "InstanceGroupManagers" + "fullName": "google.cloud.compute.v1.InstanceGroupManagerResizeRequests", + "shortName": "InstanceGroupManagerResizeRequests" }, - "shortName": "AggregatedList" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListInstanceGroupManagersRequest" + "type": "google.cloud.compute_v1.types.DeleteInstanceGroupManagerResizeRequestRequest" }, { "name": "project", "type": "str" }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "resize_request", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -13433,22 +13445,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.cloud.compute_v1.services.instance_group_managers.pagers.AggregatedListPager", - "shortName": "aggregated_list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_instance_group_managers_aggregated_list_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_instance_group_manager_resize_requests_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroupManagers_AggregatedList_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagerResizeRequests_Delete_sync", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -13458,43 +13470,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_group_managers_aggregated_list_sync.py" + "title": "compute_v1_generated_instance_group_manager_resize_requests_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", - "shortName": "InstanceGroupManagersClient" + "fullName": "google.cloud.compute_v1.InstanceGroupManagerResizeRequestsClient", + "shortName": "InstanceGroupManagerResizeRequestsClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.apply_updates_to_instances", + "fullName": "google.cloud.compute_v1.InstanceGroupManagerResizeRequestsClient.get", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers.ApplyUpdatesToInstances", + "fullName": "google.cloud.compute.v1.InstanceGroupManagerResizeRequests.Get", "service": { - 
"fullName": "google.cloud.compute.v1.InstanceGroupManagers", - "shortName": "InstanceGroupManagers" + "fullName": "google.cloud.compute.v1.InstanceGroupManagerResizeRequests", + "shortName": "InstanceGroupManagerResizeRequests" }, - "shortName": "ApplyUpdatesToInstances" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ApplyUpdatesToInstancesInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.GetInstanceGroupManagerResizeRequestRequest" }, { "name": "project", @@ -13509,8 +13521,8 @@ "type": "str" }, { - "name": "instance_group_managers_apply_updates_request_resource", - "type": "google.cloud.compute_v1.types.InstanceGroupManagersApplyUpdatesRequest" + "name": "resize_request", + "type": "str" }, { "name": "retry", @@ -13525,22 +13537,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "apply_updates_to_instances" + "resultType": "google.cloud.compute_v1.types.InstanceGroupManagerResizeRequest", + "shortName": "get" }, - "description": "Sample for ApplyUpdatesToInstances", - "file": "compute_v1_generated_instance_group_managers_apply_updates_to_instances_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_instance_group_manager_resize_requests_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroupManagers_ApplyUpdatesToInstances_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagerResizeRequests_Get_sync", "segments": [ { - "end": 53, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 54, "start": 27, "type": "SHORT" }, @@ -13550,43 +13562,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 55, + "start": 52, 
"type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_group_managers_apply_updates_to_instances_sync.py" + "title": "compute_v1_generated_instance_group_manager_resize_requests_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", - "shortName": "InstanceGroupManagersClient" + "fullName": "google.cloud.compute_v1.InstanceGroupManagerResizeRequestsClient", + "shortName": "InstanceGroupManagerResizeRequestsClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.create_instances", + "fullName": "google.cloud.compute_v1.InstanceGroupManagerResizeRequestsClient.insert", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers.CreateInstances", + "fullName": "google.cloud.compute.v1.InstanceGroupManagerResizeRequests.Insert", "service": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers", - "shortName": "InstanceGroupManagers" + "fullName": "google.cloud.compute.v1.InstanceGroupManagerResizeRequests", + "shortName": "InstanceGroupManagerResizeRequests" }, - "shortName": "CreateInstances" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.CreateInstancesInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.InsertInstanceGroupManagerResizeRequestRequest" }, { "name": "project", @@ -13601,8 +13613,8 @@ "type": "str" }, { - "name": "instance_group_managers_create_instances_request_resource", - "type": "google.cloud.compute_v1.types.InstanceGroupManagersCreateInstancesRequest" + "name": "instance_group_manager_resize_request_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManagerResizeRequest" }, { "name": "retry", @@ -13618,13 +13630,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "create_instances" + "shortName": "insert" }, - "description": "Sample for CreateInstances", - "file": 
"compute_v1_generated_instance_group_managers_create_instances_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_instance_group_manager_resize_requests_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroupManagers_CreateInstances_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagerResizeRequests_Insert_sync", "segments": [ { "end": 53, @@ -13657,28 +13669,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_group_managers_create_instances_sync.py" + "title": "compute_v1_generated_instance_group_manager_resize_requests_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", - "shortName": "InstanceGroupManagersClient" + "fullName": "google.cloud.compute_v1.InstanceGroupManagerResizeRequestsClient", + "shortName": "InstanceGroupManagerResizeRequestsClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.delete_instances", + "fullName": "google.cloud.compute_v1.InstanceGroupManagerResizeRequestsClient.list", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers.DeleteInstances", + "fullName": "google.cloud.compute.v1.InstanceGroupManagerResizeRequests.List", "service": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers", - "shortName": "InstanceGroupManagers" + "fullName": "google.cloud.compute.v1.InstanceGroupManagerResizeRequests", + "shortName": "InstanceGroupManagerResizeRequests" }, - "shortName": "DeleteInstances" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteInstancesInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.ListInstanceGroupManagerResizeRequestsRequest" }, { "name": "project", @@ -13692,10 +13704,6 @@ "name": "instance_group_manager", "type": "str" }, - { - "name": 
"instance_group_managers_delete_instances_request_resource", - "type": "google.cloud.compute_v1.types.InstanceGroupManagersDeleteInstancesRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -13709,22 +13717,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete_instances" + "resultType": "google.cloud.compute_v1.services.instance_group_manager_resize_requests.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for DeleteInstances", - "file": "compute_v1_generated_instance_group_managers_delete_instances_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_instance_group_manager_resize_requests_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroupManagers_DeleteInstances_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagerResizeRequests_List_sync", "segments": [ { - "end": 53, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 54, "start": 27, "type": "SHORT" }, @@ -13744,12 +13752,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 55, "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_group_managers_delete_instances_sync.py" + "title": "compute_v1_generated_instance_group_manager_resize_requests_list_sync.py" }, { "canonical": true, @@ -13758,19 +13766,19 @@ "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", "shortName": "InstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.delete_per_instance_configs", + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.abandon_instances", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers.DeletePerInstanceConfigs", + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.AbandonInstances", "service": { "fullName": 
"google.cloud.compute.v1.InstanceGroupManagers", "shortName": "InstanceGroupManagers" }, - "shortName": "DeletePerInstanceConfigs" + "shortName": "AbandonInstances" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeletePerInstanceConfigsInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.AbandonInstancesInstanceGroupManagerRequest" }, { "name": "project", @@ -13785,8 +13793,8 @@ "type": "str" }, { - "name": "instance_group_managers_delete_per_instance_configs_req_resource", - "type": "google.cloud.compute_v1.types.InstanceGroupManagersDeletePerInstanceConfigsReq" + "name": "instance_group_managers_abandon_instances_request_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManagersAbandonInstancesRequest" }, { "name": "retry", @@ -13802,13 +13810,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete_per_instance_configs" + "shortName": "abandon_instances" }, - "description": "Sample for DeletePerInstanceConfigs", - "file": "compute_v1_generated_instance_group_managers_delete_per_instance_configs_sync.py", + "description": "Sample for AbandonInstances", + "file": "compute_v1_generated_instance_group_managers_abandon_instances_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroupManagers_DeletePerInstanceConfigs_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagers_AbandonInstances_sync", "segments": [ { "end": 53, @@ -13841,7 +13849,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_group_managers_delete_per_instance_configs_sync.py" + "title": "compute_v1_generated_instance_group_managers_abandon_instances_sync.py" }, { "canonical": true, @@ -13850,19 +13858,99 @@ "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", "shortName": "InstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.delete", + 
"fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.aggregated_list", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers.Delete", + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.AggregatedList", "service": { "fullName": "google.cloud.compute.v1.InstanceGroupManagers", "shortName": "InstanceGroupManagers" }, - "shortName": "Delete" + "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.AggregatedListInstanceGroupManagersRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.instance_group_managers.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_instance_group_managers_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceGroupManagers_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_group_managers_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" + }, + "fullName": 
"google.cloud.compute_v1.InstanceGroupManagersClient.apply_updates_to_instances", + "method": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.ApplyUpdatesToInstances", + "service": { + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" + }, + "shortName": "ApplyUpdatesToInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ApplyUpdatesToInstancesInstanceGroupManagerRequest" }, { "name": "project", @@ -13876,6 +13964,10 @@ "name": "instance_group_manager", "type": "str" }, + { + "name": "instance_group_managers_apply_updates_request_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManagersApplyUpdatesRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -13890,13 +13982,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "shortName": "apply_updates_to_instances" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_instance_group_managers_delete_sync.py", + "description": "Sample for ApplyUpdatesToInstances", + "file": "compute_v1_generated_instance_group_managers_apply_updates_to_instances_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroupManagers_Delete_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagers_ApplyUpdatesToInstances_sync", "segments": [ { "end": 53, @@ -13929,7 +14021,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_group_managers_delete_sync.py" + "title": "compute_v1_generated_instance_group_managers_apply_updates_to_instances_sync.py" }, { "canonical": true, @@ -13938,19 +14030,19 @@ "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", "shortName": "InstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.get", + "fullName": 
"google.cloud.compute_v1.InstanceGroupManagersClient.create_instances", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers.Get", + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.CreateInstances", "service": { "fullName": "google.cloud.compute.v1.InstanceGroupManagers", "shortName": "InstanceGroupManagers" }, - "shortName": "Get" + "shortName": "CreateInstances" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.CreateInstancesInstanceGroupManagerRequest" }, { "name": "project", @@ -13964,6 +14056,10 @@ "name": "instance_group_manager", "type": "str" }, + { + "name": "instance_group_managers_create_instances_request_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManagersCreateInstancesRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -13977,14 +14073,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.InstanceGroupManager", - "shortName": "get" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "create_instances" }, - "description": "Sample for Get", - "file": "compute_v1_generated_instance_group_managers_get_sync.py", + "description": "Sample for CreateInstances", + "file": "compute_v1_generated_instance_group_managers_create_instances_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroupManagers_Get_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagers_CreateInstances_sync", "segments": [ { "end": 53, @@ -14017,7 +14113,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_group_managers_get_sync.py" + "title": "compute_v1_generated_instance_group_managers_create_instances_sync.py" }, { "canonical": true, @@ -14026,19 +14122,19 @@ "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", "shortName": 
"InstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.insert", + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.delete_instances", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers.Insert", + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.DeleteInstances", "service": { "fullName": "google.cloud.compute.v1.InstanceGroupManagers", "shortName": "InstanceGroupManagers" }, - "shortName": "Insert" + "shortName": "DeleteInstances" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.DeleteInstancesInstanceGroupManagerRequest" }, { "name": "project", @@ -14049,8 +14145,12 @@ "type": "str" }, { - "name": "instance_group_manager_resource", - "type": "google.cloud.compute_v1.types.InstanceGroupManager" + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "instance_group_managers_delete_instances_request_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManagersDeleteInstancesRequest" }, { "name": "retry", @@ -14066,21 +14166,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "shortName": "delete_instances" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_instance_group_managers_insert_sync.py", + "description": "Sample for DeleteInstances", + "file": "compute_v1_generated_instance_group_managers_delete_instances_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroupManagers_Insert_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagers_DeleteInstances_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -14090,22 +14190,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": 
"REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_group_managers_insert_sync.py" + "title": "compute_v1_generated_instance_group_managers_delete_instances_sync.py" }, { "canonical": true, @@ -14114,19 +14214,19 @@ "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", "shortName": "InstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.list_errors", + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.delete_per_instance_configs", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers.ListErrors", + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.DeletePerInstanceConfigs", "service": { "fullName": "google.cloud.compute.v1.InstanceGroupManagers", "shortName": "InstanceGroupManagers" }, - "shortName": "ListErrors" + "shortName": "DeletePerInstanceConfigs" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListErrorsInstanceGroupManagersRequest" + "type": "google.cloud.compute_v1.types.DeletePerInstanceConfigsInstanceGroupManagerRequest" }, { "name": "project", @@ -14140,6 +14240,10 @@ "name": "instance_group_manager", "type": "str" }, + { + "name": "instance_group_managers_delete_per_instance_configs_req_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManagersDeletePerInstanceConfigsReq" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -14153,22 +14257,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.services.instance_group_managers.pagers.ListErrorsPager", - "shortName": "list_errors" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete_per_instance_configs" }, - "description": "Sample for ListErrors", - "file": 
"compute_v1_generated_instance_group_managers_list_errors_sync.py", + "description": "Sample for DeletePerInstanceConfigs", + "file": "compute_v1_generated_instance_group_managers_delete_per_instance_configs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroupManagers_ListErrors_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagers_DeletePerInstanceConfigs_sync", "segments": [ { - "end": 54, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 53, "start": 27, "type": "SHORT" }, @@ -14188,12 +14292,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 55, + "end": 54, "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_group_managers_list_errors_sync.py" + "title": "compute_v1_generated_instance_group_managers_delete_per_instance_configs_sync.py" }, { "canonical": true, @@ -14202,19 +14306,19 @@ "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", "shortName": "InstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.list_managed_instances", + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.delete", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers.ListManagedInstances", + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.Delete", "service": { "fullName": "google.cloud.compute.v1.InstanceGroupManagers", "shortName": "InstanceGroupManagers" }, - "shortName": "ListManagedInstances" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListManagedInstancesInstanceGroupManagersRequest" + "type": "google.cloud.compute_v1.types.DeleteInstanceGroupManagerRequest" }, { "name": "project", @@ -14241,22 +14345,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.services.instance_group_managers.pagers.ListManagedInstancesPager", - "shortName": "list_managed_instances" + 
"resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for ListManagedInstances", - "file": "compute_v1_generated_instance_group_managers_list_managed_instances_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_instance_group_managers_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroupManagers_ListManagedInstances_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagers_Delete_sync", "segments": [ { - "end": 54, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 53, "start": 27, "type": "SHORT" }, @@ -14276,12 +14380,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 55, + "end": 54, "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_group_managers_list_managed_instances_sync.py" + "title": "compute_v1_generated_instance_group_managers_delete_sync.py" }, { "canonical": true, @@ -14290,19 +14394,19 @@ "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", "shortName": "InstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.list_per_instance_configs", + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.get", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers.ListPerInstanceConfigs", + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.Get", "service": { "fullName": "google.cloud.compute.v1.InstanceGroupManagers", "shortName": "InstanceGroupManagers" }, - "shortName": "ListPerInstanceConfigs" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListPerInstanceConfigsInstanceGroupManagersRequest" + "type": "google.cloud.compute_v1.types.GetInstanceGroupManagerRequest" }, { "name": "project", @@ -14329,22 +14433,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.cloud.compute_v1.services.instance_group_managers.pagers.ListPerInstanceConfigsPager", - "shortName": "list_per_instance_configs" + "resultType": "google.cloud.compute_v1.types.InstanceGroupManager", + "shortName": "get" }, - "description": "Sample for ListPerInstanceConfigs", - "file": "compute_v1_generated_instance_group_managers_list_per_instance_configs_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_instance_group_managers_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroupManagers_ListPerInstanceConfigs_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagers_Get_sync", "segments": [ { - "end": 54, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 53, "start": 27, "type": "SHORT" }, @@ -14364,12 +14468,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 55, + "end": 54, "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_group_managers_list_per_instance_configs_sync.py" + "title": "compute_v1_generated_instance_group_managers_get_sync.py" }, { "canonical": true, @@ -14378,19 +14482,19 @@ "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", "shortName": "InstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.list", + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.insert", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers.List", + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.Insert", "service": { "fullName": "google.cloud.compute.v1.InstanceGroupManagers", "shortName": "InstanceGroupManagers" }, - "shortName": "List" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListInstanceGroupManagersRequest" + "type": "google.cloud.compute_v1.types.InsertInstanceGroupManagerRequest" }, { "name": "project", @@ -14400,6 +14504,10 @@ "name": "zone", 
"type": "str" }, + { + "name": "instance_group_manager_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManager" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -14413,22 +14521,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.services.instance_group_managers.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" }, - "description": "Sample for List", - "file": "compute_v1_generated_instance_group_managers_list_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_instance_group_managers_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroupManagers_List_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagers_Insert_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -14448,12 +14556,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 53, "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_group_managers_list_sync.py" + "title": "compute_v1_generated_instance_group_managers_insert_sync.py" }, { "canonical": true, @@ -14462,19 +14570,19 @@ "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", "shortName": "InstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.patch_per_instance_configs", + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.list_errors", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers.PatchPerInstanceConfigs", + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.ListErrors", "service": { "fullName": "google.cloud.compute.v1.InstanceGroupManagers", "shortName": "InstanceGroupManagers" }, - "shortName": "PatchPerInstanceConfigs" + "shortName": "ListErrors" }, 
"parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchPerInstanceConfigsInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.ListErrorsInstanceGroupManagersRequest" }, { "name": "project", @@ -14488,10 +14596,6 @@ "name": "instance_group_manager", "type": "str" }, - { - "name": "instance_group_managers_patch_per_instance_configs_req_resource", - "type": "google.cloud.compute_v1.types.InstanceGroupManagersPatchPerInstanceConfigsReq" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -14505,22 +14609,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch_per_instance_configs" + "resultType": "google.cloud.compute_v1.services.instance_group_managers.pagers.ListErrorsPager", + "shortName": "list_errors" }, - "description": "Sample for PatchPerInstanceConfigs", - "file": "compute_v1_generated_instance_group_managers_patch_per_instance_configs_sync.py", + "description": "Sample for ListErrors", + "file": "compute_v1_generated_instance_group_managers_list_errors_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroupManagers_PatchPerInstanceConfigs_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagers_ListErrors_sync", "segments": [ { - "end": 53, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 54, "start": 27, "type": "SHORT" }, @@ -14540,12 +14644,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 55, "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_group_managers_patch_per_instance_configs_sync.py" + "title": "compute_v1_generated_instance_group_managers_list_errors_sync.py" }, { "canonical": true, @@ -14554,19 +14658,19 @@ "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", "shortName": "InstanceGroupManagersClient" }, - "fullName": 
"google.cloud.compute_v1.InstanceGroupManagersClient.patch", + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.list_managed_instances", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers.Patch", + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.ListManagedInstances", "service": { "fullName": "google.cloud.compute.v1.InstanceGroupManagers", "shortName": "InstanceGroupManagers" }, - "shortName": "Patch" + "shortName": "ListManagedInstances" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.ListManagedInstancesInstanceGroupManagersRequest" }, { "name": "project", @@ -14580,10 +14684,6 @@ "name": "instance_group_manager", "type": "str" }, - { - "name": "instance_group_manager_resource", - "type": "google.cloud.compute_v1.types.InstanceGroupManager" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -14597,22 +14697,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "resultType": "google.cloud.compute_v1.services.instance_group_managers.pagers.ListManagedInstancesPager", + "shortName": "list_managed_instances" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_instance_group_managers_patch_sync.py", + "description": "Sample for ListManagedInstances", + "file": "compute_v1_generated_instance_group_managers_list_managed_instances_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroupManagers_Patch_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagers_ListManagedInstances_sync", "segments": [ { - "end": 53, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 54, "start": 27, "type": "SHORT" }, @@ -14632,12 +14732,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 55, "start": 51, "type": 
"RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_group_managers_patch_sync.py" + "title": "compute_v1_generated_instance_group_managers_list_managed_instances_sync.py" }, { "canonical": true, @@ -14646,19 +14746,19 @@ "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", "shortName": "InstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.recreate_instances", + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.list_per_instance_configs", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers.RecreateInstances", + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.ListPerInstanceConfigs", "service": { "fullName": "google.cloud.compute.v1.InstanceGroupManagers", "shortName": "InstanceGroupManagers" }, - "shortName": "RecreateInstances" + "shortName": "ListPerInstanceConfigs" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.RecreateInstancesInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.ListPerInstanceConfigsInstanceGroupManagersRequest" }, { "name": "project", @@ -14672,10 +14772,6 @@ "name": "instance_group_manager", "type": "str" }, - { - "name": "instance_group_managers_recreate_instances_request_resource", - "type": "google.cloud.compute_v1.types.InstanceGroupManagersRecreateInstancesRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -14689,22 +14785,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "recreate_instances" + "resultType": "google.cloud.compute_v1.services.instance_group_managers.pagers.ListPerInstanceConfigsPager", + "shortName": "list_per_instance_configs" }, - "description": "Sample for RecreateInstances", - "file": "compute_v1_generated_instance_group_managers_recreate_instances_sync.py", + "description": "Sample for ListPerInstanceConfigs", + "file": 
"compute_v1_generated_instance_group_managers_list_per_instance_configs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroupManagers_RecreateInstances_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagers_ListPerInstanceConfigs_sync", "segments": [ { - "end": 53, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 54, "start": 27, "type": "SHORT" }, @@ -14724,12 +14820,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 55, "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_group_managers_recreate_instances_sync.py" + "title": "compute_v1_generated_instance_group_managers_list_per_instance_configs_sync.py" }, { "canonical": true, @@ -14738,19 +14834,19 @@ "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", "shortName": "InstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.resize", + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.list", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers.Resize", + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.List", "service": { "fullName": "google.cloud.compute.v1.InstanceGroupManagers", "shortName": "InstanceGroupManagers" }, - "shortName": "Resize" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ResizeInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.ListInstanceGroupManagersRequest" }, { "name": "project", @@ -14760,14 +14856,6 @@ "name": "zone", "type": "str" }, - { - "name": "instance_group_manager", - "type": "str" - }, - { - "name": "size", - "type": "int" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -14781,22 +14869,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "resize" + "resultType": 
"google.cloud.compute_v1.services.instance_group_managers.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for Resize", - "file": "compute_v1_generated_instance_group_managers_resize_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_instance_group_managers_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroupManagers_Resize_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagers_List_sync", "segments": [ { - "end": 54, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 53, "start": 27, "type": "SHORT" }, @@ -14806,22 +14894,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 54, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_group_managers_resize_sync.py" + "title": "compute_v1_generated_instance_group_managers_list_sync.py" }, { "canonical": true, @@ -14830,19 +14918,19 @@ "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", "shortName": "InstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.set_instance_template", + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.patch_per_instance_configs", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers.SetInstanceTemplate", + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.PatchPerInstanceConfigs", "service": { "fullName": "google.cloud.compute.v1.InstanceGroupManagers", "shortName": "InstanceGroupManagers" }, - "shortName": "SetInstanceTemplate" + "shortName": "PatchPerInstanceConfigs" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetInstanceTemplateInstanceGroupManagerRequest" + "type": 
"google.cloud.compute_v1.types.PatchPerInstanceConfigsInstanceGroupManagerRequest" }, { "name": "project", @@ -14857,8 +14945,8 @@ "type": "str" }, { - "name": "instance_group_managers_set_instance_template_request_resource", - "type": "google.cloud.compute_v1.types.InstanceGroupManagersSetInstanceTemplateRequest" + "name": "instance_group_managers_patch_per_instance_configs_req_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManagersPatchPerInstanceConfigsReq" }, { "name": "retry", @@ -14874,13 +14962,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "set_instance_template" + "shortName": "patch_per_instance_configs" }, - "description": "Sample for SetInstanceTemplate", - "file": "compute_v1_generated_instance_group_managers_set_instance_template_sync.py", + "description": "Sample for PatchPerInstanceConfigs", + "file": "compute_v1_generated_instance_group_managers_patch_per_instance_configs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroupManagers_SetInstanceTemplate_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagers_PatchPerInstanceConfigs_sync", "segments": [ { "end": 53, @@ -14913,7 +15001,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_group_managers_set_instance_template_sync.py" + "title": "compute_v1_generated_instance_group_managers_patch_per_instance_configs_sync.py" }, { "canonical": true, @@ -14922,19 +15010,19 @@ "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", "shortName": "InstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.set_target_pools", + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.patch", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers.SetTargetPools", + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.Patch", "service": { "fullName": 
"google.cloud.compute.v1.InstanceGroupManagers", "shortName": "InstanceGroupManagers" }, - "shortName": "SetTargetPools" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetTargetPoolsInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.PatchInstanceGroupManagerRequest" }, { "name": "project", @@ -14949,8 +15037,8 @@ "type": "str" }, { - "name": "instance_group_managers_set_target_pools_request_resource", - "type": "google.cloud.compute_v1.types.InstanceGroupManagersSetTargetPoolsRequest" + "name": "instance_group_manager_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManager" }, { "name": "retry", @@ -14966,13 +15054,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "set_target_pools" + "shortName": "patch" }, - "description": "Sample for SetTargetPools", - "file": "compute_v1_generated_instance_group_managers_set_target_pools_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_instance_group_managers_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroupManagers_SetTargetPools_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagers_Patch_sync", "segments": [ { "end": 53, @@ -15005,7 +15093,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_group_managers_set_target_pools_sync.py" + "title": "compute_v1_generated_instance_group_managers_patch_sync.py" }, { "canonical": true, @@ -15014,19 +15102,19 @@ "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", "shortName": "InstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.update_per_instance_configs", + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.recreate_instances", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroupManagers.UpdatePerInstanceConfigs", + "fullName": 
"google.cloud.compute.v1.InstanceGroupManagers.RecreateInstances", "service": { "fullName": "google.cloud.compute.v1.InstanceGroupManagers", "shortName": "InstanceGroupManagers" }, - "shortName": "UpdatePerInstanceConfigs" + "shortName": "RecreateInstances" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.UpdatePerInstanceConfigsInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.RecreateInstancesInstanceGroupManagerRequest" }, { "name": "project", @@ -15041,8 +15129,8 @@ "type": "str" }, { - "name": "instance_group_managers_update_per_instance_configs_req_resource", - "type": "google.cloud.compute_v1.types.InstanceGroupManagersUpdatePerInstanceConfigsReq" + "name": "instance_group_managers_recreate_instances_request_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManagersRecreateInstancesRequest" }, { "name": "retry", @@ -15058,13 +15146,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "update_per_instance_configs" + "shortName": "recreate_instances" }, - "description": "Sample for UpdatePerInstanceConfigs", - "file": "compute_v1_generated_instance_group_managers_update_per_instance_configs_sync.py", + "description": "Sample for RecreateInstances", + "file": "compute_v1_generated_instance_group_managers_recreate_instances_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroupManagers_UpdatePerInstanceConfigs_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagers_RecreateInstances_sync", "segments": [ { "end": 53, @@ -15097,28 +15185,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_group_managers_update_per_instance_configs_sync.py" + "title": "compute_v1_generated_instance_group_managers_recreate_instances_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InstanceGroupsClient", - "shortName": 
"InstanceGroupsClient" + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupsClient.add_instances", + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.resize", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroups.AddInstances", + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.Resize", "service": { - "fullName": "google.cloud.compute.v1.InstanceGroups", - "shortName": "InstanceGroups" + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" }, - "shortName": "AddInstances" + "shortName": "Resize" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AddInstancesInstanceGroupRequest" + "type": "google.cloud.compute_v1.types.ResizeInstanceGroupManagerRequest" }, { "name": "project", @@ -15129,12 +15217,12 @@ "type": "str" }, { - "name": "instance_group", + "name": "instance_group_manager", "type": "str" }, { - "name": "instance_groups_add_instances_request_resource", - "type": "google.cloud.compute_v1.types.InstanceGroupsAddInstancesRequest" + "name": "size", + "type": "int" }, { "name": "retry", @@ -15150,21 +15238,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "add_instances" + "shortName": "resize" }, - "description": "Sample for AddInstances", - "file": "compute_v1_generated_instance_groups_add_instances_sync.py", + "description": "Sample for Resize", + "file": "compute_v1_generated_instance_group_managers_resize_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroups_AddInstances_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagers_Resize_sync", "segments": [ { - "end": 53, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 54, "start": 27, "type": "SHORT" }, @@ -15174,48 +15262,60 @@ "type": 
"CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_groups_add_instances_sync.py" + "title": "compute_v1_generated_instance_group_managers_resize_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InstanceGroupsClient", - "shortName": "InstanceGroupsClient" + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupsClient.aggregated_list", + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.set_instance_template", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroups.AggregatedList", + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.SetInstanceTemplate", "service": { - "fullName": "google.cloud.compute.v1.InstanceGroups", - "shortName": "InstanceGroups" + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" }, - "shortName": "AggregatedList" + "shortName": "SetInstanceTemplate" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListInstanceGroupsRequest" + "type": "google.cloud.compute_v1.types.SetInstanceTemplateInstanceGroupManagerRequest" }, { "name": "project", "type": "str" }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "instance_group_managers_set_instance_template_request_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManagersSetInstanceTemplateRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -15229,22 +15329,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.cloud.compute_v1.services.instance_groups.pagers.AggregatedListPager", - "shortName": "aggregated_list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_instance_template" }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_instance_groups_aggregated_list_sync.py", + "description": "Sample for SetInstanceTemplate", + "file": "compute_v1_generated_instance_group_managers_set_instance_template_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroups_AggregatedList_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagers_SetInstanceTemplate_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -15254,43 +15354,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_groups_aggregated_list_sync.py" + "title": "compute_v1_generated_instance_group_managers_set_instance_template_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InstanceGroupsClient", - "shortName": "InstanceGroupsClient" + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupsClient.delete", + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.set_target_pools", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroups.Delete", + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.SetTargetPools", "service": { - "fullName": "google.cloud.compute.v1.InstanceGroups", - "shortName": "InstanceGroups" + 
"fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" }, - "shortName": "Delete" + "shortName": "SetTargetPools" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteInstanceGroupRequest" + "type": "google.cloud.compute_v1.types.SetTargetPoolsInstanceGroupManagerRequest" }, { "name": "project", @@ -15301,9 +15401,13 @@ "type": "str" }, { - "name": "instance_group", + "name": "instance_group_manager", "type": "str" }, + { + "name": "instance_group_managers_set_target_pools_request_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManagersSetTargetPoolsRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -15318,13 +15422,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "shortName": "set_target_pools" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_instance_groups_delete_sync.py", + "description": "Sample for SetTargetPools", + "file": "compute_v1_generated_instance_group_managers_set_target_pools_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroups_Delete_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagers_SetTargetPools_sync", "segments": [ { "end": 53, @@ -15357,28 +15461,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_groups_delete_sync.py" + "title": "compute_v1_generated_instance_group_managers_set_target_pools_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InstanceGroupsClient", - "shortName": "InstanceGroupsClient" + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient", + "shortName": "InstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupsClient.get", + "fullName": "google.cloud.compute_v1.InstanceGroupManagersClient.update_per_instance_configs", "method": { - 
"fullName": "google.cloud.compute.v1.InstanceGroups.Get", + "fullName": "google.cloud.compute.v1.InstanceGroupManagers.UpdatePerInstanceConfigs", "service": { - "fullName": "google.cloud.compute.v1.InstanceGroups", - "shortName": "InstanceGroups" + "fullName": "google.cloud.compute.v1.InstanceGroupManagers", + "shortName": "InstanceGroupManagers" }, - "shortName": "Get" + "shortName": "UpdatePerInstanceConfigs" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetInstanceGroupRequest" + "type": "google.cloud.compute_v1.types.UpdatePerInstanceConfigsInstanceGroupManagerRequest" }, { "name": "project", @@ -15389,9 +15493,13 @@ "type": "str" }, { - "name": "instance_group", + "name": "instance_group_manager", "type": "str" }, + { + "name": "instance_group_managers_update_per_instance_configs_req_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManagersUpdatePerInstanceConfigsReq" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -15405,14 +15513,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.InstanceGroup", - "shortName": "get" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update_per_instance_configs" }, - "description": "Sample for Get", - "file": "compute_v1_generated_instance_groups_get_sync.py", + "description": "Sample for UpdatePerInstanceConfigs", + "file": "compute_v1_generated_instance_group_managers_update_per_instance_configs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroups_Get_sync", + "regionTag": "compute_v1_generated_InstanceGroupManagers_UpdatePerInstanceConfigs_sync", "segments": [ { "end": 53, @@ -15445,7 +15553,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_groups_get_sync.py" + "title": "compute_v1_generated_instance_group_managers_update_per_instance_configs_sync.py" }, { "canonical": true, @@ -15454,19 
+15562,19 @@ "fullName": "google.cloud.compute_v1.InstanceGroupsClient", "shortName": "InstanceGroupsClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupsClient.insert", + "fullName": "google.cloud.compute_v1.InstanceGroupsClient.add_instances", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroups.Insert", + "fullName": "google.cloud.compute.v1.InstanceGroups.AddInstances", "service": { "fullName": "google.cloud.compute.v1.InstanceGroups", "shortName": "InstanceGroups" }, - "shortName": "Insert" + "shortName": "AddInstances" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertInstanceGroupRequest" + "type": "google.cloud.compute_v1.types.AddInstancesInstanceGroupRequest" }, { "name": "project", @@ -15477,8 +15585,12 @@ "type": "str" }, { - "name": "instance_group_resource", - "type": "google.cloud.compute_v1.types.InstanceGroup" + "name": "instance_group", + "type": "str" + }, + { + "name": "instance_groups_add_instances_request_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupsAddInstancesRequest" }, { "name": "retry", @@ -15494,21 +15606,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "shortName": "add_instances" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_instance_groups_insert_sync.py", + "description": "Sample for AddInstances", + "file": "compute_v1_generated_instance_groups_add_instances_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroups_Insert_sync", + "regionTag": "compute_v1_generated_InstanceGroups_AddInstances_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -15518,22 +15630,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + 
"start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_groups_insert_sync.py" + "title": "compute_v1_generated_instance_groups_add_instances_sync.py" }, { "canonical": true, @@ -15542,36 +15654,24 @@ "fullName": "google.cloud.compute_v1.InstanceGroupsClient", "shortName": "InstanceGroupsClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupsClient.list_instances", + "fullName": "google.cloud.compute_v1.InstanceGroupsClient.aggregated_list", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroups.ListInstances", + "fullName": "google.cloud.compute.v1.InstanceGroups.AggregatedList", "service": { "fullName": "google.cloud.compute.v1.InstanceGroups", "shortName": "InstanceGroups" }, - "shortName": "ListInstances" + "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListInstancesInstanceGroupsRequest" + "type": "google.cloud.compute_v1.types.AggregatedListInstanceGroupsRequest" }, { "name": "project", "type": "str" }, - { - "name": "zone", - "type": "str" - }, - { - "name": "instance_group", - "type": "str" - }, - { - "name": "instance_groups_list_instances_request_resource", - "type": "google.cloud.compute_v1.types.InstanceGroupsListInstancesRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -15585,22 +15685,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.services.instance_groups.pagers.ListInstancesPager", - "shortName": "list_instances" + "resultType": "google.cloud.compute_v1.services.instance_groups.pagers.AggregatedListPager", + "shortName": "aggregated_list" }, - "description": "Sample for ListInstances", - "file": "compute_v1_generated_instance_groups_list_instances_sync.py", + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_instance_groups_aggregated_list_sync.py", 
"language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroups_ListInstances_sync", + "regionTag": "compute_v1_generated_InstanceGroups_AggregatedList_sync", "segments": [ { - "end": 54, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 52, "start": 27, "type": "SHORT" }, @@ -15610,22 +15710,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 51, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_groups_list_instances_sync.py" + "title": "compute_v1_generated_instance_groups_aggregated_list_sync.py" }, { "canonical": true, @@ -15634,19 +15734,19 @@ "fullName": "google.cloud.compute_v1.InstanceGroupsClient", "shortName": "InstanceGroupsClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupsClient.list", + "fullName": "google.cloud.compute_v1.InstanceGroupsClient.delete", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroups.List", + "fullName": "google.cloud.compute.v1.InstanceGroups.Delete", "service": { "fullName": "google.cloud.compute.v1.InstanceGroups", "shortName": "InstanceGroups" }, - "shortName": "List" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListInstanceGroupsRequest" + "type": "google.cloud.compute_v1.types.DeleteInstanceGroupRequest" }, { "name": "project", @@ -15656,6 +15756,10 @@ "name": "zone", "type": "str" }, + { + "name": "instance_group", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -15669,14 +15773,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.services.instance_groups.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" 
}, - "description": "Sample for List", - "file": "compute_v1_generated_instance_groups_list_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_instance_groups_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroups_List_sync", + "regionTag": "compute_v1_generated_InstanceGroups_Delete_sync", "segments": [ { "end": 53, @@ -15694,22 +15798,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 50, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_groups_list_sync.py" + "title": "compute_v1_generated_instance_groups_delete_sync.py" }, { "canonical": true, @@ -15718,19 +15822,19 @@ "fullName": "google.cloud.compute_v1.InstanceGroupsClient", "shortName": "InstanceGroupsClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupsClient.remove_instances", + "fullName": "google.cloud.compute_v1.InstanceGroupsClient.get", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroups.RemoveInstances", + "fullName": "google.cloud.compute.v1.InstanceGroups.Get", "service": { "fullName": "google.cloud.compute.v1.InstanceGroups", "shortName": "InstanceGroups" }, - "shortName": "RemoveInstances" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.RemoveInstancesInstanceGroupRequest" + "type": "google.cloud.compute_v1.types.GetInstanceGroupRequest" }, { "name": "project", @@ -15744,10 +15848,6 @@ "name": "instance_group", "type": "str" }, - { - "name": "instance_groups_remove_instances_request_resource", - "type": "google.cloud.compute_v1.types.InstanceGroupsRemoveInstancesRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -15761,14 +15861,14 @@ "type": "Sequence[Tuple[str, str]" } ], - 
"resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "remove_instances" + "resultType": "google.cloud.compute_v1.types.InstanceGroup", + "shortName": "get" }, - "description": "Sample for RemoveInstances", - "file": "compute_v1_generated_instance_groups_remove_instances_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_instance_groups_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroups_RemoveInstances_sync", + "regionTag": "compute_v1_generated_InstanceGroups_Get_sync", "segments": [ { "end": 53, @@ -15801,7 +15901,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_groups_remove_instances_sync.py" + "title": "compute_v1_generated_instance_groups_get_sync.py" }, { "canonical": true, @@ -15810,19 +15910,19 @@ "fullName": "google.cloud.compute_v1.InstanceGroupsClient", "shortName": "InstanceGroupsClient" }, - "fullName": "google.cloud.compute_v1.InstanceGroupsClient.set_named_ports", + "fullName": "google.cloud.compute_v1.InstanceGroupsClient.insert", "method": { - "fullName": "google.cloud.compute.v1.InstanceGroups.SetNamedPorts", + "fullName": "google.cloud.compute.v1.InstanceGroups.Insert", "service": { "fullName": "google.cloud.compute.v1.InstanceGroups", "shortName": "InstanceGroups" }, - "shortName": "SetNamedPorts" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetNamedPortsInstanceGroupRequest" + "type": "google.cloud.compute_v1.types.InsertInstanceGroupRequest" }, { "name": "project", @@ -15833,12 +15933,8 @@ "type": "str" }, { - "name": "instance_group", - "type": "str" - }, - { - "name": "instance_groups_set_named_ports_request_resource", - "type": "google.cloud.compute_v1.types.InstanceGroupsSetNamedPortsRequest" + "name": "instance_group_resource", + "type": "google.cloud.compute_v1.types.InstanceGroup" }, { "name": "retry", @@ -15854,21 +15950,21 
@@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "set_named_ports" + "shortName": "insert" }, - "description": "Sample for SetNamedPorts", - "file": "compute_v1_generated_instance_groups_set_named_ports_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_instance_groups_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceGroups_SetNamedPorts_sync", + "regionTag": "compute_v1_generated_InstanceGroups_Insert_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -15878,48 +15974,60 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_groups_set_named_ports_sync.py" + "title": "compute_v1_generated_instance_groups_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InstanceTemplatesClient", - "shortName": "InstanceTemplatesClient" + "fullName": "google.cloud.compute_v1.InstanceGroupsClient", + "shortName": "InstanceGroupsClient" }, - "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.aggregated_list", + "fullName": "google.cloud.compute_v1.InstanceGroupsClient.list_instances", "method": { - "fullName": "google.cloud.compute.v1.InstanceTemplates.AggregatedList", + "fullName": "google.cloud.compute.v1.InstanceGroups.ListInstances", "service": { - "fullName": "google.cloud.compute.v1.InstanceTemplates", - "shortName": "InstanceTemplates" + "fullName": "google.cloud.compute.v1.InstanceGroups", + "shortName": "InstanceGroups" }, - "shortName": "AggregatedList" + "shortName": "ListInstances" }, "parameters": [ { 
"name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListInstanceTemplatesRequest" + "type": "google.cloud.compute_v1.types.ListInstancesInstanceGroupsRequest" }, { "name": "project", "type": "str" }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_group", + "type": "str" + }, + { + "name": "instance_groups_list_instances_request_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupsListInstancesRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -15933,22 +16041,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.services.instance_templates.pagers.AggregatedListPager", - "shortName": "aggregated_list" + "resultType": "google.cloud.compute_v1.services.instance_groups.pagers.ListInstancesPager", + "shortName": "list_instances" }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_instance_templates_aggregated_list_sync.py", + "description": "Sample for ListInstances", + "file": "compute_v1_generated_instance_groups_list_instances_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceTemplates_AggregatedList_sync", + "regionTag": "compute_v1_generated_InstanceGroups_ListInstances_sync", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -15958,50 +16066,50 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_templates_aggregated_list_sync.py" + "title": "compute_v1_generated_instance_groups_list_instances_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": 
"google.cloud.compute_v1.InstanceTemplatesClient", - "shortName": "InstanceTemplatesClient" + "fullName": "google.cloud.compute_v1.InstanceGroupsClient", + "shortName": "InstanceGroupsClient" }, - "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.delete", + "fullName": "google.cloud.compute_v1.InstanceGroupsClient.list", "method": { - "fullName": "google.cloud.compute.v1.InstanceTemplates.Delete", + "fullName": "google.cloud.compute.v1.InstanceGroups.List", "service": { - "fullName": "google.cloud.compute.v1.InstanceTemplates", - "shortName": "InstanceTemplates" + "fullName": "google.cloud.compute.v1.InstanceGroups", + "shortName": "InstanceGroups" }, - "shortName": "Delete" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteInstanceTemplateRequest" + "type": "google.cloud.compute_v1.types.ListInstanceGroupsRequest" }, { "name": "project", "type": "str" }, { - "name": "instance_template", + "name": "zone", "type": "str" }, { @@ -16017,22 +16125,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "resultType": "google.cloud.compute_v1.services.instance_groups.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_instance_templates_delete_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_instance_groups_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceTemplates_Delete_sync", + "regionTag": "compute_v1_generated_InstanceGroups_List_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -16052,42 +16160,50 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 54, "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_templates_delete_sync.py" 
+ "title": "compute_v1_generated_instance_groups_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InstanceTemplatesClient", - "shortName": "InstanceTemplatesClient" + "fullName": "google.cloud.compute_v1.InstanceGroupsClient", + "shortName": "InstanceGroupsClient" }, - "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.get_iam_policy", + "fullName": "google.cloud.compute_v1.InstanceGroupsClient.remove_instances", "method": { - "fullName": "google.cloud.compute.v1.InstanceTemplates.GetIamPolicy", + "fullName": "google.cloud.compute.v1.InstanceGroups.RemoveInstances", "service": { - "fullName": "google.cloud.compute.v1.InstanceTemplates", - "shortName": "InstanceTemplates" + "fullName": "google.cloud.compute.v1.InstanceGroups", + "shortName": "InstanceGroups" }, - "shortName": "GetIamPolicy" + "shortName": "RemoveInstances" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetIamPolicyInstanceTemplateRequest" + "type": "google.cloud.compute_v1.types.RemoveInstancesInstanceGroupRequest" }, { "name": "project", "type": "str" }, { - "name": "resource", + "name": "zone", + "type": "str" + }, + { + "name": "instance_group", "type": "str" }, + { + "name": "instance_groups_remove_instances_request_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupsRemoveInstancesRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -16101,22 +16217,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "get_iam_policy" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "remove_instances" }, - "description": "Sample for GetIamPolicy", - "file": "compute_v1_generated_instance_templates_get_iam_policy_sync.py", + "description": "Sample for RemoveInstances", + "file": "compute_v1_generated_instance_groups_remove_instances_sync.py", "language": "PYTHON", "origin": 
"API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceTemplates_GetIamPolicy_sync", + "regionTag": "compute_v1_generated_InstanceGroups_RemoveInstances_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -16126,52 +16242,60 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_templates_get_iam_policy_sync.py" + "title": "compute_v1_generated_instance_groups_remove_instances_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InstanceTemplatesClient", - "shortName": "InstanceTemplatesClient" + "fullName": "google.cloud.compute_v1.InstanceGroupsClient", + "shortName": "InstanceGroupsClient" }, - "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.get", + "fullName": "google.cloud.compute_v1.InstanceGroupsClient.set_named_ports", "method": { - "fullName": "google.cloud.compute.v1.InstanceTemplates.Get", + "fullName": "google.cloud.compute.v1.InstanceGroups.SetNamedPorts", "service": { - "fullName": "google.cloud.compute.v1.InstanceTemplates", - "shortName": "InstanceTemplates" + "fullName": "google.cloud.compute.v1.InstanceGroups", + "shortName": "InstanceGroups" }, - "shortName": "Get" + "shortName": "SetNamedPorts" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetInstanceTemplateRequest" + "type": "google.cloud.compute_v1.types.SetNamedPortsInstanceGroupRequest" }, { "name": "project", "type": "str" }, { - "name": "instance_template", + "name": "zone", + "type": "str" + }, + { + "name": "instance_group", "type": "str" }, + { + "name": "instance_groups_set_named_ports_request_resource", + "type": 
"google.cloud.compute_v1.types.InstanceGroupsSetNamedPortsRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -16185,22 +16309,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.InstanceTemplate", - "shortName": "get" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_named_ports" }, - "description": "Sample for Get", - "file": "compute_v1_generated_instance_templates_get_sync.py", + "description": "Sample for SetNamedPorts", + "file": "compute_v1_generated_instance_groups_set_named_ports_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceTemplates_Get_sync", + "regionTag": "compute_v1_generated_InstanceGroups_SetNamedPorts_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -16210,51 +16334,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_templates_get_sync.py" + "title": "compute_v1_generated_instance_groups_set_named_ports_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InstanceTemplatesClient", - "shortName": "InstanceTemplatesClient" + "fullName": "google.cloud.compute_v1.InstanceSettingsServiceClient", + "shortName": "InstanceSettingsServiceClient" }, - "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.insert", + "fullName": "google.cloud.compute_v1.InstanceSettingsServiceClient.get", "method": { - "fullName": "google.cloud.compute.v1.InstanceTemplates.Insert", + "fullName": "google.cloud.compute.v1.InstanceSettingsService.Get", "service": { - "fullName": 
"google.cloud.compute.v1.InstanceTemplates", - "shortName": "InstanceTemplates" + "fullName": "google.cloud.compute.v1.InstanceSettingsService", + "shortName": "InstanceSettingsService" }, - "shortName": "Insert" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertInstanceTemplateRequest" + "type": "google.cloud.compute_v1.types.GetInstanceSettingRequest" }, { "name": "project", "type": "str" }, { - "name": "instance_template_resource", - "type": "google.cloud.compute_v1.types.InstanceTemplate" + "name": "zone", + "type": "str" }, { "name": "retry", @@ -16269,22 +16393,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "resultType": "google.cloud.compute_v1.types.InstanceSettings", + "shortName": "get" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_instance_templates_insert_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_instance_settings_service_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceTemplates_Insert_sync", + "regionTag": "compute_v1_generated_InstanceSettingsService_Get_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -16294,48 +16418,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_templates_insert_sync.py" + "title": "compute_v1_generated_instance_settings_service_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InstanceTemplatesClient", - "shortName": 
"InstanceTemplatesClient" + "fullName": "google.cloud.compute_v1.InstanceSettingsServiceClient", + "shortName": "InstanceSettingsServiceClient" }, - "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.list", + "fullName": "google.cloud.compute_v1.InstanceSettingsServiceClient.patch", "method": { - "fullName": "google.cloud.compute.v1.InstanceTemplates.List", + "fullName": "google.cloud.compute.v1.InstanceSettingsService.Patch", "service": { - "fullName": "google.cloud.compute.v1.InstanceTemplates", - "shortName": "InstanceTemplates" + "fullName": "google.cloud.compute.v1.InstanceSettingsService", + "shortName": "InstanceSettingsService" }, - "shortName": "List" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListInstanceTemplatesRequest" + "type": "google.cloud.compute_v1.types.PatchInstanceSettingRequest" }, { "name": "project", "type": "str" }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instance_settings_resource", + "type": "google.cloud.compute_v1.types.InstanceSettings" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -16349,14 +16481,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.services.instance_templates.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" }, - "description": "Sample for List", - "file": "compute_v1_generated_instance_templates_list_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_instance_settings_service_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceTemplates_List_sync", + "regionTag": "compute_v1_generated_InstanceSettingsService_Patch_sync", "segments": [ { "end": 52, @@ -16374,22 +16506,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - 
"start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 49, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_templates_list_sync.py" + "title": "compute_v1_generated_instance_settings_service_patch_sync.py" }, { "canonical": true, @@ -16398,32 +16530,24 @@ "fullName": "google.cloud.compute_v1.InstanceTemplatesClient", "shortName": "InstanceTemplatesClient" }, - "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.set_iam_policy", + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.aggregated_list", "method": { - "fullName": "google.cloud.compute.v1.InstanceTemplates.SetIamPolicy", + "fullName": "google.cloud.compute.v1.InstanceTemplates.AggregatedList", "service": { "fullName": "google.cloud.compute.v1.InstanceTemplates", "shortName": "InstanceTemplates" }, - "shortName": "SetIamPolicy" + "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetIamPolicyInstanceTemplateRequest" + "type": "google.cloud.compute_v1.types.AggregatedListInstanceTemplatesRequest" }, { "name": "project", "type": "str" }, - { - "name": "resource", - "type": "str" - }, - { - "name": "global_set_policy_request_resource", - "type": "google.cloud.compute_v1.types.GlobalSetPolicyRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -16437,14 +16561,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "set_iam_policy" + "resultType": "google.cloud.compute_v1.services.instance_templates.pagers.AggregatedListPager", + "shortName": "aggregated_list" }, - "description": "Sample for SetIamPolicy", - "file": "compute_v1_generated_instance_templates_set_iam_policy_sync.py", + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_instance_templates_aggregated_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"compute_v1_generated_InstanceTemplates_SetIamPolicy_sync", + "regionTag": "compute_v1_generated_InstanceTemplates_AggregatedList_sync", "segments": [ { "end": 52, @@ -16462,22 +16586,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instance_templates_set_iam_policy_sync.py" + "title": "compute_v1_generated_instance_templates_aggregated_list_sync.py" }, { "canonical": true, @@ -16486,32 +16610,28 @@ "fullName": "google.cloud.compute_v1.InstanceTemplatesClient", "shortName": "InstanceTemplatesClient" }, - "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.delete", "method": { - "fullName": "google.cloud.compute.v1.InstanceTemplates.TestIamPermissions", + "fullName": "google.cloud.compute.v1.InstanceTemplates.Delete", "service": { "fullName": "google.cloud.compute.v1.InstanceTemplates", "shortName": "InstanceTemplates" }, - "shortName": "TestIamPermissions" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsInstanceTemplateRequest" + "type": "google.cloud.compute_v1.types.DeleteInstanceTemplateRequest" }, { "name": "project", "type": "str" }, { - "name": "resource", + "name": "instance_template", "type": "str" }, - { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -16525,14 +16645,522 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + 
"shortName": "delete" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_instance_templates_test_iam_permissions_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_instance_templates_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstanceTemplates_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_InstanceTemplates_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_templates_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient", + "shortName": "InstanceTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.InstanceTemplates.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.InstanceTemplates", + "shortName": "InstanceTemplates" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicyInstanceTemplateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": 
"compute_v1_generated_instance_templates_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceTemplates_GetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_templates_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient", + "shortName": "InstanceTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.InstanceTemplates.Get", + "service": { + "fullName": "google.cloud.compute.v1.InstanceTemplates", + "shortName": "InstanceTemplates" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetInstanceTemplateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "instance_template", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.InstanceTemplate", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_instance_templates_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceTemplates_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, 
+ "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_templates_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient", + "shortName": "InstanceTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.InstanceTemplates.Insert", + "service": { + "fullName": "google.cloud.compute.v1.InstanceTemplates", + "shortName": "InstanceTemplates" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertInstanceTemplateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "instance_template_resource", + "type": "google.cloud.compute_v1.types.InstanceTemplate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_instance_templates_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceTemplates_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "compute_v1_generated_instance_templates_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient", + "shortName": "InstanceTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.InstanceTemplates.List", + "service": { + "fullName": "google.cloud.compute.v1.InstanceTemplates", + "shortName": "InstanceTemplates" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListInstanceTemplatesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.instance_templates.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_instance_templates_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceTemplates_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_templates_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient", + "shortName": "InstanceTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.set_iam_policy", + "method": { + "fullName": 
"google.cloud.compute.v1.InstanceTemplates.SetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.InstanceTemplates", + "shortName": "InstanceTemplates" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicyInstanceTemplateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "global_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_instance_templates_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceTemplates_SetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instance_templates_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient", + "shortName": "InstanceTemplatesClient" + }, + "fullName": "google.cloud.compute_v1.InstanceTemplatesClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.InstanceTemplates.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.InstanceTemplates", + 
"shortName": "InstanceTemplates" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsInstanceTemplateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_instance_templates_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstanceTemplates_TestIamPermissions_sync", "segments": [ { "end": 52, @@ -30131,6 +30759,98 @@ ], "title": "compute_v1_generated_node_groups_patch_sync.py" }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NodeGroupsClient", + "shortName": "NodeGroupsClient" + }, + "fullName": "google.cloud.compute_v1.NodeGroupsClient.perform_maintenance", + "method": { + "fullName": "google.cloud.compute.v1.NodeGroups.PerformMaintenance", + "service": { + "fullName": "google.cloud.compute.v1.NodeGroups", + "shortName": "NodeGroups" + }, + "shortName": "PerformMaintenance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PerformMaintenanceNodeGroupRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "node_group", + "type": "str" + }, + { + "name": "node_groups_perform_maintenance_request_resource", + "type": "google.cloud.compute_v1.types.NodeGroupsPerformMaintenanceRequest" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "perform_maintenance" + }, + "description": "Sample for PerformMaintenance", + "file": "compute_v1_generated_node_groups_perform_maintenance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NodeGroups_PerformMaintenance_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_node_groups_perform_maintenance_sync.py" + }, { "canonical": true, "clientMethod": { @@ -54069,37 +54789,1129 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_snapshot_settings_service_get_sync.py" + "title": "compute_v1_generated_snapshot_settings_service_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SnapshotSettingsServiceClient", + "shortName": "SnapshotSettingsServiceClient" + }, + "fullName": "google.cloud.compute_v1.SnapshotSettingsServiceClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.SnapshotSettingsService.Patch", + "service": { + "fullName": "google.cloud.compute.v1.SnapshotSettingsService", + "shortName": "SnapshotSettingsService" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchSnapshotSettingRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "snapshot_settings_resource", + "type": 
"google.cloud.compute_v1.types.SnapshotSettings" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_snapshot_settings_service_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SnapshotSettingsService_Patch_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_snapshot_settings_service_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SnapshotsClient", + "shortName": "SnapshotsClient" + }, + "fullName": "google.cloud.compute_v1.SnapshotsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.Snapshots.Delete", + "service": { + "fullName": "google.cloud.compute.v1.Snapshots", + "shortName": "Snapshots" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteSnapshotRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "snapshot", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": 
"Sample for Delete", + "file": "compute_v1_generated_snapshots_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Snapshots_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_snapshots_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SnapshotsClient", + "shortName": "SnapshotsClient" + }, + "fullName": "google.cloud.compute_v1.SnapshotsClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.Snapshots.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.Snapshots", + "shortName": "Snapshots" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicySnapshotRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_snapshots_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Snapshots_GetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_snapshots_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SnapshotsClient", + "shortName": "SnapshotsClient" + }, + "fullName": "google.cloud.compute_v1.SnapshotsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.Snapshots.Get", + "service": { + "fullName": "google.cloud.compute.v1.Snapshots", + "shortName": "Snapshots" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetSnapshotRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "snapshot", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Snapshot", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_snapshots_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Snapshots_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_snapshots_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SnapshotsClient", + "shortName": 
"SnapshotsClient" + }, + "fullName": "google.cloud.compute_v1.SnapshotsClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.Snapshots.Insert", + "service": { + "fullName": "google.cloud.compute.v1.Snapshots", + "shortName": "Snapshots" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertSnapshotRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "snapshot_resource", + "type": "google.cloud.compute_v1.types.Snapshot" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_snapshots_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Snapshots_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_snapshots_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SnapshotsClient", + "shortName": "SnapshotsClient" + }, + "fullName": "google.cloud.compute_v1.SnapshotsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.Snapshots.List", + "service": { + "fullName": "google.cloud.compute.v1.Snapshots", + "shortName": "Snapshots" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.ListSnapshotsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.snapshots.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_snapshots_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Snapshots_List_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_snapshots_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SnapshotsClient", + "shortName": "SnapshotsClient" + }, + "fullName": "google.cloud.compute_v1.SnapshotsClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.Snapshots.SetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.Snapshots", + "shortName": "Snapshots" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicySnapshotRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "global_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_snapshots_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Snapshots_SetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_snapshots_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SnapshotsClient", + "shortName": "SnapshotsClient" + }, + "fullName": "google.cloud.compute_v1.SnapshotsClient.set_labels", + "method": { + "fullName": "google.cloud.compute.v1.Snapshots.SetLabels", + "service": { + "fullName": "google.cloud.compute.v1.Snapshots", + "shortName": "Snapshots" + }, + "shortName": "SetLabels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetLabelsSnapshotRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "global_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetLabelsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_labels" + }, + "description": "Sample for SetLabels", + "file": "compute_v1_generated_snapshots_set_labels_sync.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Snapshots_SetLabels_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_snapshots_set_labels_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SnapshotsClient", + "shortName": "SnapshotsClient" + }, + "fullName": "google.cloud.compute_v1.SnapshotsClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.Snapshots.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.Snapshots", + "shortName": "Snapshots" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsSnapshotRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_snapshots_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Snapshots_TestIamPermissions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": 
"FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_snapshots_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SslCertificatesClient", + "shortName": "SslCertificatesClient" + }, + "fullName": "google.cloud.compute_v1.SslCertificatesClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.SslCertificates.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.SslCertificates", + "shortName": "SslCertificates" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListSslCertificatesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.services.ssl_certificates.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_ssl_certificates_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SslCertificates_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 
53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_ssl_certificates_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SslCertificatesClient", + "shortName": "SslCertificatesClient" + }, + "fullName": "google.cloud.compute_v1.SslCertificatesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.SslCertificates.Delete", + "service": { + "fullName": "google.cloud.compute.v1.SslCertificates", + "shortName": "SslCertificates" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteSslCertificateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "ssl_certificate", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_ssl_certificates_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SslCertificates_Delete_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_ssl_certificates_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SslCertificatesClient", + "shortName": "SslCertificatesClient" + }, + "fullName": 
"google.cloud.compute_v1.SslCertificatesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.SslCertificates.Get", + "service": { + "fullName": "google.cloud.compute.v1.SslCertificates", + "shortName": "SslCertificates" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetSslCertificateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "ssl_certificate", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.compute_v1.types.SslCertificate", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_ssl_certificates_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SslCertificates_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_ssl_certificates_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SslCertificatesClient", + "shortName": "SslCertificatesClient" + }, + "fullName": "google.cloud.compute_v1.SslCertificatesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.SslCertificates.Insert", + "service": { + "fullName": "google.cloud.compute.v1.SslCertificates", + "shortName": "SslCertificates" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.InsertSslCertificateRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "ssl_certificate_resource", + "type": "google.cloud.compute_v1.types.SslCertificate" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_ssl_certificates_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_SslCertificates_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_ssl_certificates_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.SnapshotSettingsServiceClient", - "shortName": "SnapshotSettingsServiceClient" + "fullName": "google.cloud.compute_v1.SslCertificatesClient", + "shortName": "SslCertificatesClient" }, - "fullName": "google.cloud.compute_v1.SnapshotSettingsServiceClient.patch", + "fullName": "google.cloud.compute_v1.SslCertificatesClient.list", "method": { - "fullName": "google.cloud.compute.v1.SnapshotSettingsService.Patch", + "fullName": "google.cloud.compute.v1.SslCertificates.List", "service": { - "fullName": "google.cloud.compute.v1.SnapshotSettingsService", - "shortName": "SnapshotSettingsService" + "fullName": "google.cloud.compute.v1.SslCertificates", + "shortName": "SslCertificates" }, - "shortName": 
"Patch" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchSnapshotSettingRequest" + "type": "google.cloud.compute_v1.types.ListSslCertificatesRequest" }, { "name": "project", "type": "str" }, - { - "name": "snapshot_settings_resource", - "type": "google.cloud.compute_v1.types.SnapshotSettings" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -54113,22 +55925,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "resultType": "google.cloud.compute_v1.services.ssl_certificates.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_snapshot_settings_service_patch_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_ssl_certificates_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_SnapshotSettingsService_Patch_sync", + "regionTag": "compute_v1_generated_SslCertificates_List_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -54148,42 +55960,38 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_snapshot_settings_service_patch_sync.py" + "title": "compute_v1_generated_ssl_certificates_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.SnapshotsClient", - "shortName": "SnapshotsClient" + "fullName": "google.cloud.compute_v1.SslPoliciesClient", + "shortName": "SslPoliciesClient" }, - "fullName": "google.cloud.compute_v1.SnapshotsClient.delete", + "fullName": "google.cloud.compute_v1.SslPoliciesClient.aggregated_list", "method": { - "fullName": "google.cloud.compute.v1.Snapshots.Delete", + "fullName": 
"google.cloud.compute.v1.SslPolicies.AggregatedList", "service": { - "fullName": "google.cloud.compute.v1.Snapshots", - "shortName": "Snapshots" + "fullName": "google.cloud.compute.v1.SslPolicies", + "shortName": "SslPolicies" }, - "shortName": "Delete" + "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteSnapshotRequest" + "type": "google.cloud.compute_v1.types.AggregatedListSslPoliciesRequest" }, { "name": "project", "type": "str" }, - { - "name": "snapshot", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -54197,14 +56005,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "resultType": "google.cloud.compute_v1.services.ssl_policies.pagers.AggregatedListPager", + "shortName": "aggregated_list" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_snapshots_delete_sync.py", + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_ssl_policies_aggregated_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Snapshots_Delete_sync", + "regionTag": "compute_v1_generated_SslPolicies_AggregatedList_sync", "segments": [ { "end": 52, @@ -54222,50 +56030,50 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_snapshots_delete_sync.py" + "title": "compute_v1_generated_ssl_policies_aggregated_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.SnapshotsClient", - "shortName": "SnapshotsClient" + "fullName": "google.cloud.compute_v1.SslPoliciesClient", + "shortName": "SslPoliciesClient" }, - 
"fullName": "google.cloud.compute_v1.SnapshotsClient.get_iam_policy", + "fullName": "google.cloud.compute_v1.SslPoliciesClient.delete", "method": { - "fullName": "google.cloud.compute.v1.Snapshots.GetIamPolicy", + "fullName": "google.cloud.compute.v1.SslPolicies.Delete", "service": { - "fullName": "google.cloud.compute.v1.Snapshots", - "shortName": "Snapshots" + "fullName": "google.cloud.compute.v1.SslPolicies", + "shortName": "SslPolicies" }, - "shortName": "GetIamPolicy" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetIamPolicySnapshotRequest" + "type": "google.cloud.compute_v1.types.DeleteSslPolicyRequest" }, { "name": "project", "type": "str" }, { - "name": "resource", + "name": "ssl_policy", "type": "str" }, { @@ -54281,14 +56089,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "get_iam_policy" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for GetIamPolicy", - "file": "compute_v1_generated_snapshots_get_iam_policy_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_ssl_policies_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Snapshots_GetIamPolicy_sync", + "regionTag": "compute_v1_generated_SslPolicies_Delete_sync", "segments": [ { "end": 52, @@ -54321,35 +56129,35 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_snapshots_get_iam_policy_sync.py" + "title": "compute_v1_generated_ssl_policies_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.SnapshotsClient", - "shortName": "SnapshotsClient" + "fullName": "google.cloud.compute_v1.SslPoliciesClient", + "shortName": "SslPoliciesClient" }, - "fullName": "google.cloud.compute_v1.SnapshotsClient.get", + "fullName": 
"google.cloud.compute_v1.SslPoliciesClient.get", "method": { - "fullName": "google.cloud.compute.v1.Snapshots.Get", + "fullName": "google.cloud.compute.v1.SslPolicies.Get", "service": { - "fullName": "google.cloud.compute.v1.Snapshots", - "shortName": "Snapshots" + "fullName": "google.cloud.compute.v1.SslPolicies", + "shortName": "SslPolicies" }, "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetSnapshotRequest" + "type": "google.cloud.compute_v1.types.GetSslPolicyRequest" }, { "name": "project", "type": "str" }, { - "name": "snapshot", + "name": "ssl_policy", "type": "str" }, { @@ -54365,14 +56173,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.Snapshot", + "resultType": "google.cloud.compute_v1.types.SslPolicy", "shortName": "get" }, "description": "Sample for Get", - "file": "compute_v1_generated_snapshots_get_sync.py", + "file": "compute_v1_generated_ssl_policies_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Snapshots_Get_sync", + "regionTag": "compute_v1_generated_SslPolicies_Get_sync", "segments": [ { "end": 52, @@ -54405,36 +56213,36 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_snapshots_get_sync.py" + "title": "compute_v1_generated_ssl_policies_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.SnapshotsClient", - "shortName": "SnapshotsClient" + "fullName": "google.cloud.compute_v1.SslPoliciesClient", + "shortName": "SslPoliciesClient" }, - "fullName": "google.cloud.compute_v1.SnapshotsClient.insert", + "fullName": "google.cloud.compute_v1.SslPoliciesClient.insert", "method": { - "fullName": "google.cloud.compute.v1.Snapshots.Insert", + "fullName": "google.cloud.compute.v1.SslPolicies.Insert", "service": { - "fullName": "google.cloud.compute.v1.Snapshots", - "shortName": "Snapshots" + "fullName": 
"google.cloud.compute.v1.SslPolicies", + "shortName": "SslPolicies" }, "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertSnapshotRequest" + "type": "google.cloud.compute_v1.types.InsertSslPolicyRequest" }, { "name": "project", "type": "str" }, { - "name": "snapshot_resource", - "type": "google.cloud.compute_v1.types.Snapshot" + "name": "ssl_policy_resource", + "type": "google.cloud.compute_v1.types.SslPolicy" }, { "name": "retry", @@ -54453,10 +56261,10 @@ "shortName": "insert" }, "description": "Sample for Insert", - "file": "compute_v1_generated_snapshots_insert_sync.py", + "file": "compute_v1_generated_ssl_policies_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Snapshots_Insert_sync", + "regionTag": "compute_v1_generated_SslPolicies_Insert_sync", "segments": [ { "end": 51, @@ -54489,28 +56297,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_snapshots_insert_sync.py" + "title": "compute_v1_generated_ssl_policies_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.SnapshotsClient", - "shortName": "SnapshotsClient" + "fullName": "google.cloud.compute_v1.SslPoliciesClient", + "shortName": "SslPoliciesClient" }, - "fullName": "google.cloud.compute_v1.SnapshotsClient.list", + "fullName": "google.cloud.compute_v1.SslPoliciesClient.list_available_features", "method": { - "fullName": "google.cloud.compute.v1.Snapshots.List", + "fullName": "google.cloud.compute.v1.SslPolicies.ListAvailableFeatures", "service": { - "fullName": "google.cloud.compute.v1.Snapshots", - "shortName": "Snapshots" + "fullName": "google.cloud.compute.v1.SslPolicies", + "shortName": "SslPolicies" }, - "shortName": "List" + "shortName": "ListAvailableFeatures" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListSnapshotsRequest" + "type": 
"google.cloud.compute_v1.types.ListAvailableFeaturesSslPoliciesRequest" }, { "name": "project", @@ -54529,22 +56337,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.services.snapshots.pagers.ListPager", - "shortName": "list" + "resultType": "google.cloud.compute_v1.types.SslPoliciesListAvailableFeaturesResponse", + "shortName": "list_available_features" }, - "description": "Sample for List", - "file": "compute_v1_generated_snapshots_list_sync.py", + "description": "Sample for ListAvailableFeatures", + "file": "compute_v1_generated_ssl_policies_list_available_features_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Snapshots_List_sync", + "regionTag": "compute_v1_generated_SslPolicies_ListAvailableFeatures_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -54564,46 +56372,38 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_snapshots_list_sync.py" + "title": "compute_v1_generated_ssl_policies_list_available_features_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.SnapshotsClient", - "shortName": "SnapshotsClient" + "fullName": "google.cloud.compute_v1.SslPoliciesClient", + "shortName": "SslPoliciesClient" }, - "fullName": "google.cloud.compute_v1.SnapshotsClient.set_iam_policy", + "fullName": "google.cloud.compute_v1.SslPoliciesClient.list", "method": { - "fullName": "google.cloud.compute.v1.Snapshots.SetIamPolicy", + "fullName": "google.cloud.compute.v1.SslPolicies.List", "service": { - "fullName": "google.cloud.compute.v1.Snapshots", - "shortName": "Snapshots" + "fullName": "google.cloud.compute.v1.SslPolicies", + "shortName": "SslPolicies" }, - "shortName": "SetIamPolicy" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.compute_v1.types.SetIamPolicySnapshotRequest" + "type": "google.cloud.compute_v1.types.ListSslPoliciesRequest" }, { "name": "project", "type": "str" }, - { - "name": "resource", - "type": "str" - }, - { - "name": "global_set_policy_request_resource", - "type": "google.cloud.compute_v1.types.GlobalSetPolicyRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -54617,14 +56417,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "set_iam_policy" + "resultType": "google.cloud.compute_v1.services.ssl_policies.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for SetIamPolicy", - "file": "compute_v1_generated_snapshots_set_iam_policy_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_ssl_policies_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Snapshots_SetIamPolicy_sync", + "regionTag": "compute_v1_generated_SslPolicies_List_sync", "segments": [ { "end": 52, @@ -54642,55 +56442,55 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_snapshots_set_iam_policy_sync.py" + "title": "compute_v1_generated_ssl_policies_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.SnapshotsClient", - "shortName": "SnapshotsClient" + "fullName": "google.cloud.compute_v1.SslPoliciesClient", + "shortName": "SslPoliciesClient" }, - "fullName": "google.cloud.compute_v1.SnapshotsClient.set_labels", + "fullName": "google.cloud.compute_v1.SslPoliciesClient.patch", "method": { - "fullName": "google.cloud.compute.v1.Snapshots.SetLabels", + "fullName": 
"google.cloud.compute.v1.SslPolicies.Patch", "service": { - "fullName": "google.cloud.compute.v1.Snapshots", - "shortName": "Snapshots" + "fullName": "google.cloud.compute.v1.SslPolicies", + "shortName": "SslPolicies" }, - "shortName": "SetLabels" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetLabelsSnapshotRequest" + "type": "google.cloud.compute_v1.types.PatchSslPolicyRequest" }, { "name": "project", "type": "str" }, { - "name": "resource", + "name": "ssl_policy", "type": "str" }, { - "name": "global_set_labels_request_resource", - "type": "google.cloud.compute_v1.types.GlobalSetLabelsRequest" + "name": "ssl_policy_resource", + "type": "google.cloud.compute_v1.types.SslPolicy" }, { "name": "retry", @@ -54706,13 +56506,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "set_labels" + "shortName": "patch" }, - "description": "Sample for SetLabels", - "file": "compute_v1_generated_snapshots_set_labels_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_ssl_policies_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Snapshots_SetLabels_sync", + "regionTag": "compute_v1_generated_SslPolicies_Patch_sync", "segments": [ { "end": 52, @@ -54745,41 +56545,33 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_snapshots_set_labels_sync.py" + "title": "compute_v1_generated_ssl_policies_patch_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.SnapshotsClient", - "shortName": "SnapshotsClient" + "fullName": "google.cloud.compute_v1.StoragePoolTypesClient", + "shortName": "StoragePoolTypesClient" }, - "fullName": "google.cloud.compute_v1.SnapshotsClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.StoragePoolTypesClient.aggregated_list", "method": { - "fullName": 
"google.cloud.compute.v1.Snapshots.TestIamPermissions", + "fullName": "google.cloud.compute.v1.StoragePoolTypes.AggregatedList", "service": { - "fullName": "google.cloud.compute.v1.Snapshots", - "shortName": "Snapshots" + "fullName": "google.cloud.compute.v1.StoragePoolTypes", + "shortName": "StoragePoolTypes" }, - "shortName": "TestIamPermissions" + "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsSnapshotRequest" + "type": "google.cloud.compute_v1.types.AggregatedListStoragePoolTypesRequest" }, { "name": "project", "type": "str" }, - { - "name": "resource", - "type": "str" - }, - { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -54793,14 +56585,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.cloud.compute_v1.services.storage_pool_types.pagers.AggregatedListPager", + "shortName": "aggregated_list" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_snapshots_test_iam_permissions_sync.py", + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_storage_pool_types_aggregated_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Snapshots_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_StoragePoolTypes_AggregatedList_sync", "segments": [ { "end": 52, @@ -54818,48 +56610,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": 
"compute_v1_generated_snapshots_test_iam_permissions_sync.py" + "title": "compute_v1_generated_storage_pool_types_aggregated_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.SslCertificatesClient", - "shortName": "SslCertificatesClient" + "fullName": "google.cloud.compute_v1.StoragePoolTypesClient", + "shortName": "StoragePoolTypesClient" }, - "fullName": "google.cloud.compute_v1.SslCertificatesClient.aggregated_list", + "fullName": "google.cloud.compute_v1.StoragePoolTypesClient.get", "method": { - "fullName": "google.cloud.compute.v1.SslCertificates.AggregatedList", + "fullName": "google.cloud.compute.v1.StoragePoolTypes.Get", "service": { - "fullName": "google.cloud.compute.v1.SslCertificates", - "shortName": "SslCertificates" + "fullName": "google.cloud.compute.v1.StoragePoolTypes", + "shortName": "StoragePoolTypes" }, - "shortName": "AggregatedList" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListSslCertificatesRequest" + "type": "google.cloud.compute_v1.types.GetStoragePoolTypeRequest" }, { "name": "project", "type": "str" }, + { + "name": "zone", + "type": "str" + }, + { + "name": "storage_pool_type", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -54873,22 +56673,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.services.ssl_certificates.pagers.AggregatedListPager", - "shortName": "aggregated_list" + "resultType": "google.cloud.compute_v1.types.StoragePoolType", + "shortName": "get" }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_ssl_certificates_aggregated_list_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_storage_pool_types_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_SslCertificates_AggregatedList_sync", + "regionTag": 
"compute_v1_generated_StoragePoolTypes_Get_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -54898,50 +56698,50 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_ssl_certificates_aggregated_list_sync.py" + "title": "compute_v1_generated_storage_pool_types_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.SslCertificatesClient", - "shortName": "SslCertificatesClient" + "fullName": "google.cloud.compute_v1.StoragePoolTypesClient", + "shortName": "StoragePoolTypesClient" }, - "fullName": "google.cloud.compute_v1.SslCertificatesClient.delete", + "fullName": "google.cloud.compute_v1.StoragePoolTypesClient.list", "method": { - "fullName": "google.cloud.compute.v1.SslCertificates.Delete", + "fullName": "google.cloud.compute.v1.StoragePoolTypes.List", "service": { - "fullName": "google.cloud.compute.v1.SslCertificates", - "shortName": "SslCertificates" + "fullName": "google.cloud.compute.v1.StoragePoolTypes", + "shortName": "StoragePoolTypes" }, - "shortName": "Delete" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteSslCertificateRequest" + "type": "google.cloud.compute_v1.types.ListStoragePoolTypesRequest" }, { "name": "project", "type": "str" }, { - "name": "ssl_certificate", + "name": "zone", "type": "str" }, { @@ -54957,22 +56757,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "resultType": "google.cloud.compute_v1.services.storage_pool_types.pagers.ListPager", + "shortName": "list" }, - 
"description": "Sample for Delete", - "file": "compute_v1_generated_ssl_certificates_delete_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_storage_pool_types_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_SslCertificates_Delete_sync", + "regionTag": "compute_v1_generated_StoragePoolTypes_List_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -54992,42 +56792,38 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 54, "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_ssl_certificates_delete_sync.py" + "title": "compute_v1_generated_storage_pool_types_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.SslCertificatesClient", - "shortName": "SslCertificatesClient" + "fullName": "google.cloud.compute_v1.StoragePoolsClient", + "shortName": "StoragePoolsClient" }, - "fullName": "google.cloud.compute_v1.SslCertificatesClient.get", + "fullName": "google.cloud.compute_v1.StoragePoolsClient.aggregated_list", "method": { - "fullName": "google.cloud.compute.v1.SslCertificates.Get", + "fullName": "google.cloud.compute.v1.StoragePools.AggregatedList", "service": { - "fullName": "google.cloud.compute.v1.SslCertificates", - "shortName": "SslCertificates" + "fullName": "google.cloud.compute.v1.StoragePools", + "shortName": "StoragePools" }, - "shortName": "Get" + "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetSslCertificateRequest" + "type": "google.cloud.compute_v1.types.AggregatedListStoragePoolsRequest" }, { "name": "project", "type": "str" }, - { - "name": "ssl_certificate", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -55041,14 +56837,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.cloud.compute_v1.types.SslCertificate", - "shortName": "get" + "resultType": "google.cloud.compute_v1.services.storage_pools.pagers.AggregatedListPager", + "shortName": "aggregated_list" }, - "description": "Sample for Get", - "file": "compute_v1_generated_ssl_certificates_get_sync.py", + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_storage_pools_aggregated_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_SslCertificates_Get_sync", + "regionTag": "compute_v1_generated_StoragePools_AggregatedList_sync", "segments": [ { "end": 52, @@ -55066,51 +56862,55 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_ssl_certificates_get_sync.py" + "title": "compute_v1_generated_storage_pools_aggregated_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.SslCertificatesClient", - "shortName": "SslCertificatesClient" + "fullName": "google.cloud.compute_v1.StoragePoolsClient", + "shortName": "StoragePoolsClient" }, - "fullName": "google.cloud.compute_v1.SslCertificatesClient.insert", + "fullName": "google.cloud.compute_v1.StoragePoolsClient.delete", "method": { - "fullName": "google.cloud.compute.v1.SslCertificates.Insert", + "fullName": "google.cloud.compute.v1.StoragePools.Delete", "service": { - "fullName": "google.cloud.compute.v1.SslCertificates", - "shortName": "SslCertificates" + "fullName": "google.cloud.compute.v1.StoragePools", + "shortName": "StoragePools" }, - "shortName": "Insert" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertSslCertificateRequest" + "type": 
"google.cloud.compute_v1.types.DeleteStoragePoolRequest" }, { "name": "project", "type": "str" }, { - "name": "ssl_certificate_resource", - "type": "google.cloud.compute_v1.types.SslCertificate" + "name": "zone", + "type": "str" + }, + { + "name": "storage_pool", + "type": "str" }, { "name": "retry", @@ -55126,21 +56926,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "shortName": "delete" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_ssl_certificates_insert_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_storage_pools_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_SslCertificates_Insert_sync", + "regionTag": "compute_v1_generated_StoragePools_Delete_sync", "segments": [ { - "end": 51, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 53, "start": 27, "type": "SHORT" }, @@ -55150,48 +56950,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_ssl_certificates_insert_sync.py" + "title": "compute_v1_generated_storage_pools_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.SslCertificatesClient", - "shortName": "SslCertificatesClient" + "fullName": "google.cloud.compute_v1.StoragePoolsClient", + "shortName": "StoragePoolsClient" }, - "fullName": "google.cloud.compute_v1.SslCertificatesClient.list", + "fullName": "google.cloud.compute_v1.StoragePoolsClient.get_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.SslCertificates.List", + "fullName": "google.cloud.compute.v1.StoragePools.GetIamPolicy", "service": { - "fullName": 
"google.cloud.compute.v1.SslCertificates", - "shortName": "SslCertificates" + "fullName": "google.cloud.compute.v1.StoragePools", + "shortName": "StoragePools" }, - "shortName": "List" + "shortName": "GetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListSslCertificatesRequest" + "type": "google.cloud.compute_v1.types.GetIamPolicyStoragePoolRequest" }, { "name": "project", "type": "str" }, + { + "name": "zone", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -55205,22 +57013,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.services.ssl_certificates.pagers.ListPager", - "shortName": "list" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" }, - "description": "Sample for List", - "file": "compute_v1_generated_ssl_certificates_list_sync.py", + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_storage_pools_get_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_SslCertificates_List_sync", + "regionTag": "compute_v1_generated_StoragePools_GetIamPolicy_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -55230,48 +57038,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_ssl_certificates_list_sync.py" + "title": "compute_v1_generated_storage_pools_get_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.SslPoliciesClient", - "shortName": "SslPoliciesClient" + "fullName": 
"google.cloud.compute_v1.StoragePoolsClient", + "shortName": "StoragePoolsClient" }, - "fullName": "google.cloud.compute_v1.SslPoliciesClient.aggregated_list", + "fullName": "google.cloud.compute_v1.StoragePoolsClient.get", "method": { - "fullName": "google.cloud.compute.v1.SslPolicies.AggregatedList", + "fullName": "google.cloud.compute.v1.StoragePools.Get", "service": { - "fullName": "google.cloud.compute.v1.SslPolicies", - "shortName": "SslPolicies" + "fullName": "google.cloud.compute.v1.StoragePools", + "shortName": "StoragePools" }, - "shortName": "AggregatedList" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListSslPoliciesRequest" + "type": "google.cloud.compute_v1.types.GetStoragePoolRequest" }, { "name": "project", "type": "str" }, + { + "name": "zone", + "type": "str" + }, + { + "name": "storage_pool", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -55285,22 +57101,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.services.ssl_policies.pagers.AggregatedListPager", - "shortName": "aggregated_list" + "resultType": "google.cloud.compute_v1.types.StoragePool", + "shortName": "get" }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_ssl_policies_aggregated_list_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_storage_pools_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_SslPolicies_AggregatedList_sync", + "regionTag": "compute_v1_generated_StoragePools_Get_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -55310,52 +57126,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { 
- "end": 53, - "start": 49, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_ssl_policies_aggregated_list_sync.py" + "title": "compute_v1_generated_storage_pools_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.SslPoliciesClient", - "shortName": "SslPoliciesClient" + "fullName": "google.cloud.compute_v1.StoragePoolsClient", + "shortName": "StoragePoolsClient" }, - "fullName": "google.cloud.compute_v1.SslPoliciesClient.delete", + "fullName": "google.cloud.compute_v1.StoragePoolsClient.insert", "method": { - "fullName": "google.cloud.compute.v1.SslPolicies.Delete", + "fullName": "google.cloud.compute.v1.StoragePools.Insert", "service": { - "fullName": "google.cloud.compute.v1.SslPolicies", - "shortName": "SslPolicies" + "fullName": "google.cloud.compute.v1.StoragePools", + "shortName": "StoragePools" }, - "shortName": "Delete" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteSslPolicyRequest" + "type": "google.cloud.compute_v1.types.InsertStoragePoolRequest" }, { "name": "project", "type": "str" }, { - "name": "ssl_policy", + "name": "zone", "type": "str" }, + { + "name": "storage_pool_resource", + "type": "google.cloud.compute_v1.types.StoragePool" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -55370,13 +57190,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "shortName": "insert" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_ssl_policies_delete_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_storage_pools_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_SslPolicies_Delete_sync", + "regionTag": "compute_v1_generated_StoragePools_Insert_sync", "segments": [ { "end": 52, @@ -55409,35 +57229,39 @@ "type": 
"RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_ssl_policies_delete_sync.py" + "title": "compute_v1_generated_storage_pools_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.SslPoliciesClient", - "shortName": "SslPoliciesClient" + "fullName": "google.cloud.compute_v1.StoragePoolsClient", + "shortName": "StoragePoolsClient" }, - "fullName": "google.cloud.compute_v1.SslPoliciesClient.get", + "fullName": "google.cloud.compute_v1.StoragePoolsClient.list_disks", "method": { - "fullName": "google.cloud.compute.v1.SslPolicies.Get", + "fullName": "google.cloud.compute.v1.StoragePools.ListDisks", "service": { - "fullName": "google.cloud.compute.v1.SslPolicies", - "shortName": "SslPolicies" + "fullName": "google.cloud.compute.v1.StoragePools", + "shortName": "StoragePools" }, - "shortName": "Get" + "shortName": "ListDisks" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetSslPolicyRequest" + "type": "google.cloud.compute_v1.types.ListDisksStoragePoolsRequest" }, { "name": "project", "type": "str" }, { - "name": "ssl_policy", + "name": "zone", + "type": "str" + }, + { + "name": "storage_pool", "type": "str" }, { @@ -55453,22 +57277,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.SslPolicy", - "shortName": "get" + "resultType": "google.cloud.compute_v1.services.storage_pools.pagers.ListDisksPager", + "shortName": "list_disks" }, - "description": "Sample for Get", - "file": "compute_v1_generated_ssl_policies_get_sync.py", + "description": "Sample for ListDisks", + "file": "compute_v1_generated_storage_pools_list_disks_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_SslPolicies_Get_sync", + "regionTag": "compute_v1_generated_StoragePools_ListDisks_sync", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, 
@@ -55478,51 +57302,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 55, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_ssl_policies_get_sync.py" + "title": "compute_v1_generated_storage_pools_list_disks_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.SslPoliciesClient", - "shortName": "SslPoliciesClient" + "fullName": "google.cloud.compute_v1.StoragePoolsClient", + "shortName": "StoragePoolsClient" }, - "fullName": "google.cloud.compute_v1.SslPoliciesClient.insert", + "fullName": "google.cloud.compute_v1.StoragePoolsClient.list", "method": { - "fullName": "google.cloud.compute.v1.SslPolicies.Insert", + "fullName": "google.cloud.compute.v1.StoragePools.List", "service": { - "fullName": "google.cloud.compute.v1.SslPolicies", - "shortName": "SslPolicies" + "fullName": "google.cloud.compute.v1.StoragePools", + "shortName": "StoragePools" }, - "shortName": "Insert" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertSslPolicyRequest" + "type": "google.cloud.compute_v1.types.ListStoragePoolsRequest" }, { "name": "project", "type": "str" }, { - "name": "ssl_policy_resource", - "type": "google.cloud.compute_v1.types.SslPolicy" + "name": "zone", + "type": "str" }, { "name": "retry", @@ -55537,22 +57361,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "resultType": "google.cloud.compute_v1.services.storage_pools.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_ssl_policies_insert_sync.py", + "description": "Sample for List", + "file": 
"compute_v1_generated_storage_pools_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_SslPolicies_Insert_sync", + "regionTag": "compute_v1_generated_StoragePools_List_sync", "segments": [ { - "end": 51, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 53, "start": 27, "type": "SHORT" }, @@ -55562,48 +57386,60 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 54, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_ssl_policies_insert_sync.py" + "title": "compute_v1_generated_storage_pools_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.SslPoliciesClient", - "shortName": "SslPoliciesClient" + "fullName": "google.cloud.compute_v1.StoragePoolsClient", + "shortName": "StoragePoolsClient" }, - "fullName": "google.cloud.compute_v1.SslPoliciesClient.list_available_features", + "fullName": "google.cloud.compute_v1.StoragePoolsClient.set_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.SslPolicies.ListAvailableFeatures", + "fullName": "google.cloud.compute.v1.StoragePools.SetIamPolicy", "service": { - "fullName": "google.cloud.compute.v1.SslPolicies", - "shortName": "SslPolicies" + "fullName": "google.cloud.compute.v1.StoragePools", + "shortName": "StoragePools" }, - "shortName": "ListAvailableFeatures" + "shortName": "SetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListAvailableFeaturesSslPoliciesRequest" + "type": "google.cloud.compute_v1.types.SetIamPolicyStoragePoolRequest" }, { "name": "project", "type": "str" }, + { + "name": "zone", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "zone_set_policy_request_resource", + "type": 
"google.cloud.compute_v1.types.ZoneSetPolicyRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -55617,22 +57453,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.types.SslPoliciesListAvailableFeaturesResponse", - "shortName": "list_available_features" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" }, - "description": "Sample for ListAvailableFeatures", - "file": "compute_v1_generated_ssl_policies_list_available_features_sync.py", + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_storage_pools_set_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_SslPolicies_ListAvailableFeatures_sync", + "regionTag": "compute_v1_generated_StoragePools_SetIamPolicy_sync", "segments": [ { - "end": 51, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 53, "start": 27, "type": "SHORT" }, @@ -55642,48 +57478,60 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_ssl_policies_list_available_features_sync.py" + "title": "compute_v1_generated_storage_pools_set_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.SslPoliciesClient", - "shortName": "SslPoliciesClient" + "fullName": "google.cloud.compute_v1.StoragePoolsClient", + "shortName": "StoragePoolsClient" }, - "fullName": "google.cloud.compute_v1.SslPoliciesClient.list", + "fullName": "google.cloud.compute_v1.StoragePoolsClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.SslPolicies.List", + "fullName": "google.cloud.compute.v1.StoragePools.TestIamPermissions", "service": { - 
"fullName": "google.cloud.compute.v1.SslPolicies", - "shortName": "SslPolicies" + "fullName": "google.cloud.compute.v1.StoragePools", + "shortName": "StoragePools" }, - "shortName": "List" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListSslPoliciesRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsStoragePoolRequest" }, { "name": "project", "type": "str" }, + { + "name": "zone", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -55697,22 +57545,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.compute_v1.services.ssl_policies.pagers.ListPager", - "shortName": "list" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for List", - "file": "compute_v1_generated_ssl_policies_list_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_storage_pools_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_SslPolicies_List_sync", + "regionTag": "compute_v1_generated_StoragePools_TestIamPermissions_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -55722,55 +57570,59 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_ssl_policies_list_sync.py" + "title": "compute_v1_generated_storage_pools_test_iam_permissions_sync.py" 
}, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.SslPoliciesClient", - "shortName": "SslPoliciesClient" + "fullName": "google.cloud.compute_v1.StoragePoolsClient", + "shortName": "StoragePoolsClient" }, - "fullName": "google.cloud.compute_v1.SslPoliciesClient.patch", + "fullName": "google.cloud.compute_v1.StoragePoolsClient.update", "method": { - "fullName": "google.cloud.compute.v1.SslPolicies.Patch", + "fullName": "google.cloud.compute.v1.StoragePools.Update", "service": { - "fullName": "google.cloud.compute.v1.SslPolicies", - "shortName": "SslPolicies" + "fullName": "google.cloud.compute.v1.StoragePools", + "shortName": "StoragePools" }, - "shortName": "Patch" + "shortName": "Update" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchSslPolicyRequest" + "type": "google.cloud.compute_v1.types.UpdateStoragePoolRequest" }, { "name": "project", "type": "str" }, { - "name": "ssl_policy", + "name": "zone", "type": "str" }, { - "name": "ssl_policy_resource", - "type": "google.cloud.compute_v1.types.SslPolicy" + "name": "storage_pool", + "type": "str" + }, + { + "name": "storage_pool_resource", + "type": "google.cloud.compute_v1.types.StoragePool" }, { "name": "retry", @@ -55786,21 +57638,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "shortName": "update" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_ssl_policies_patch_sync.py", + "description": "Sample for Update", + "file": "compute_v1_generated_storage_pools_update_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_SslPolicies_Patch_sync", + "regionTag": "compute_v1_generated_StoragePools_Update_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -55810,22 +57662,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + 
"end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_ssl_policies_patch_sync.py" + "title": "compute_v1_generated_storage_pools_update_sync.py" }, { "canonical": true, diff --git a/packages/google-cloud-compute/scripts/fixup_compute_v1_keywords.py b/packages/google-cloud-compute/scripts/fixup_compute_v1_keywords.py index fd6ebd87a7ff..16368ebf4939 100644 --- a/packages/google-cloud-compute/scripts/fixup_compute_v1_keywords.py +++ b/packages/google-cloud-compute/scripts/fixup_compute_v1_keywords.py @@ -56,6 +56,7 @@ class computeCallTransformer(cst.CSTTransformer): 'attach_disk': ('attached_disk_resource', 'instance', 'project', 'zone', 'force_attach', 'request_id', ), 'attach_network_endpoints': ('global_network_endpoint_groups_attach_endpoints_request_resource', 'network_endpoint_group', 'project', 'request_id', ), 'bulk_insert': ('bulk_insert_disk_resource_resource', 'project', 'zone', 'request_id', ), + 'cancel': ('instance_group_manager', 'project', 'resize_request', 'zone', 'request_id', ), 'clone_rules': ('firewall_policy', 'request_id', 'source_firewall_policy', ), 'create_instances': ('instance_group_manager', 'instance_group_managers_create_instances_request_resource', 'project', 'zone', 'request_id', ), 'create_snapshot': ('disk', 'project', 'snapshot_resource', 'zone', 'guest_flush', 'request_id', ), @@ -97,6 +98,7 @@ class computeCallTransformer(cst.CSTTransformer): 'list': ('project', 'zone', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), 'list_associations': ('target_resource', ), 'list_available_features': ('project', 'region', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'list_disks': ('project', 'storage_pool', 'zone', 'filter', 'max_results', 'order_by', 
'page_token', 'return_partial_success', ), 'list_errors': ('instance_group_manager', 'project', 'zone', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), 'list_instances': ('instance_group', 'instance_groups_list_instances_request_resource', 'project', 'zone', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), 'list_managed_instances': ('instance_group_manager', 'project', 'zone', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_accelerator_types.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_accelerator_types.py index e02f6808df1c..e8f82aa94193 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_accelerator_types.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_accelerator_types.py @@ -1018,6 +1018,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListAcceleratorTypesRequest, ): @@ -1397,6 +1433,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetAcceleratorTypeRequest): transport_class = transports.AcceleratorTypesRestTransport @@ -1697,6 +1769,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListAcceleratorTypesRequest): transport_class = transports.AcceleratorTypesRestTransport diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_addresses.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_addresses.py index 45d0ff051b45..657dd52a7c1c 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_addresses.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_addresses.py @@ -976,6 +976,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListAddressesRequest, ): @@ -1372,6 +1408,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteAddressRequest): transport_class = transports.AddressesRestTransport @@ -1678,6 +1754,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields(request_type=compute.DeleteAddressRequest): transport_class = transports.AddressesRestTransport @@ -2000,6 +2116,42 @@ def test_get_rest(request_type): assert response.users == ["users_value"] +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetAddressRequest): transport_class = transports.AddressesRestTransport @@ -2415,6 +2567,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertAddressRequest): transport_class = transports.AddressesRestTransport @@ -2803,6 +2995,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields(request_type=compute.InsertAddressRequest): transport_class = transports.AddressesRestTransport @@ -3088,6 +3320,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListAddressesRequest): transport_class = transports.AddressesRestTransport @@ -3558,6 +3826,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_move_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.move in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.move] = mock_rpc + + request = {} + client.move(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.move(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_move_rest_required_fields(request_type=compute.MoveAddressRequest): transport_class = transports.AddressesRestTransport @@ -3954,6 +4262,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_move_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.move in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.move] = mock_rpc + + request = {} + client.move_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.move_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_move_unary_rest_required_fields(request_type=compute.MoveAddressRequest): transport_class = transports.AddressesRestTransport @@ -4369,6 +4717,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_labels_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_rest_required_fields(request_type=compute.SetLabelsAddressRequest): transport_class = transports.AddressesRestTransport @@ -4764,6 +5152,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_labels_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_unary_rest_required_fields( request_type=compute.SetLabelsAddressRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_autoscalers.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_autoscalers.py index c6d93e2dff1e..f398b3bb4eb5 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_autoscalers.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_autoscalers.py @@ -984,6 +984,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListAutoscalersRequest, ): @@ -1385,6 +1421,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteAutoscalerRequest): transport_class = transports.AutoscalersRestTransport @@ -1695,6 +1771,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteAutoscalerRequest, ): @@ -2007,6 +2123,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetAutoscalerRequest): transport_class = transports.AutoscalersRestTransport @@ -2446,6 +2598,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertAutoscalerRequest): transport_class = transports.AutoscalersRestTransport @@ -2864,6 +3056,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertAutoscalerRequest, ): @@ -3159,6 +3391,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListAutoscalersRequest): transport_class = transports.AutoscalersRestTransport @@ -3662,6 +3930,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchAutoscalerRequest): transport_class = transports.AutoscalersRestTransport @@ -4088,6 +4396,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields(request_type=compute.PatchAutoscalerRequest): transport_class = transports.AutoscalersRestTransport @@ -4536,6 +4884,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_update_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_rest_required_fields(request_type=compute.UpdateAutoscalerRequest): transport_class = transports.AutoscalersRestTransport @@ -4964,6 +5352,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_update_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_unary_rest_required_fields( request_type=compute.UpdateAutoscalerRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_buckets.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_buckets.py index 51e1ea07c092..4c44cbc2d73d 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_buckets.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_buckets.py @@ -1114,6 +1114,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_add_signed_url_key_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.add_signed_url_key in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.add_signed_url_key + ] = mock_rpc + + request = {} + client.add_signed_url_key(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_signed_url_key(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_signed_url_key_rest_required_fields( request_type=compute.AddSignedUrlKeyBackendBucketRequest, ): @@ -1494,6 +1538,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_add_signed_url_key_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.add_signed_url_key in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.add_signed_url_key + ] = mock_rpc + + request = {} + client.add_signed_url_key_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_signed_url_key_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_signed_url_key_unary_rest_required_fields( request_type=compute.AddSignedUrlKeyBackendBucketRequest, ): @@ -1821,6 +1909,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteBackendBucketRequest): transport_class = transports.BackendBucketsRestTransport @@ -2120,6 +2248,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteBackendBucketRequest, ): @@ -2443,6 +2611,51 @@ def test_delete_signed_url_key_rest(request_type): assert response.zone == "zone_value" +def test_delete_signed_url_key_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_signed_url_key + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_signed_url_key + ] = mock_rpc + + request = {} + client.delete_signed_url_key(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_signed_url_key(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_signed_url_key_rest_required_fields( request_type=compute.DeleteSignedUrlKeyBackendBucketRequest, ): @@ -2769,6 +2982,51 @@ def test_delete_signed_url_key_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_signed_url_key_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_signed_url_key + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_signed_url_key + ] = mock_rpc + + request = {} + client.delete_signed_url_key_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_signed_url_key_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_signed_url_key_unary_rest_required_fields( request_type=compute.DeleteSignedUrlKeyBackendBucketRequest, ): @@ -3095,6 +3353,42 @@ def test_get_rest(request_type): assert response.self_link == "self_link_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetBackendBucketRequest): transport_class = transports.BackendBucketsRestTransport @@ -3378,6 +3672,42 @@ def test_get_iam_policy_rest(request_type): assert response.version == 774 +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=compute.GetIamPolicyBackendBucketRequest, ): @@ -3814,6 +4144,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertBackendBucketRequest): transport_class = transports.BackendBucketsRestTransport @@ -4227,6 +4597,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertBackendBucketRequest, ): @@ -4515,6 +4925,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListBackendBucketsRequest): transport_class = transports.BackendBucketsRestTransport @@ -5011,6 +5457,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchBackendBucketRequest): transport_class = transports.BackendBucketsRestTransport @@ -5431,6 +5917,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchBackendBucketRequest, ): @@ -5840,6 +6366,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_edge_security_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_edge_security_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_edge_security_policy + ] = mock_rpc + + request = {} + client.set_edge_security_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_edge_security_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_edge_security_policy_rest_required_fields( request_type=compute.SetEdgeSecurityPolicyBackendBucketRequest, ): @@ -6228,6 +6799,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_edge_security_policy_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_edge_security_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_edge_security_policy + ] = mock_rpc + + request = {} + client.set_edge_security_policy_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_edge_security_policy_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_edge_security_policy_unary_rest_required_fields( request_type=compute.SetEdgeSecurityPolicyBackendBucketRequest, ): @@ -6674,6 +7290,42 @@ def get_message_fields(field): assert response.version == 774 +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=compute.SetIamPolicyBackendBucketRequest, ): @@ -7039,6 +7691,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsBackendBucketRequest, ): @@ -7483,6 +8175,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_update_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_rest_required_fields(request_type=compute.UpdateBackendBucketRequest): transport_class = transports.BackendBucketsRestTransport @@ -7903,6 +8635,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_update_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_unary_rest_required_fields( request_type=compute.UpdateBackendBucketRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_services.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_services.py index ffd402f2efa7..0ef96439d9d6 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_services.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_services.py @@ -1128,6 +1128,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_add_signed_url_key_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.add_signed_url_key in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.add_signed_url_key + ] = mock_rpc + + request = {} + client.add_signed_url_key(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_signed_url_key(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_signed_url_key_rest_required_fields( request_type=compute.AddSignedUrlKeyBackendServiceRequest, ): @@ -1508,6 +1552,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_add_signed_url_key_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.add_signed_url_key in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.add_signed_url_key + ] = mock_rpc + + request = {} + client.add_signed_url_key_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_signed_url_key_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_signed_url_key_unary_rest_required_fields( request_type=compute.AddSignedUrlKeyBackendServiceRequest, ): @@ -1801,6 +1889,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListBackendServicesRequest, ): @@ -2204,6 +2328,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteBackendServiceRequest): transport_class = transports.BackendServicesRestTransport @@ -2503,6 +2667,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteBackendServiceRequest, ): @@ -2826,6 +3030,51 @@ def test_delete_signed_url_key_rest(request_type): assert response.zone == "zone_value" +def test_delete_signed_url_key_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_signed_url_key + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_signed_url_key + ] = mock_rpc + + request = {} + client.delete_signed_url_key(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_signed_url_key(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_signed_url_key_rest_required_fields( request_type=compute.DeleteSignedUrlKeyBackendServiceRequest, ): @@ -3153,6 +3402,51 @@ def test_delete_signed_url_key_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_signed_url_key_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_signed_url_key + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_signed_url_key + ] = mock_rpc + + request = {} + client.delete_signed_url_key_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_signed_url_key_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_signed_url_key_unary_rest_required_fields( request_type=compute.DeleteSignedUrlKeyBackendServiceRequest, ): @@ -3510,6 +3804,42 @@ def test_get_rest(request_type): assert response.timeout_sec == 1185 +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetBackendServiceRequest): transport_class = transports.BackendServicesRestTransport @@ -3865,6 +4195,42 @@ def get_message_fields(field): assert response.kind == "kind_value" +def test_get_health_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_health in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_health] = mock_rpc + + request = {} + client.get_health(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_health(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_health_rest_required_fields( request_type=compute.GetHealthBackendServiceRequest, ): @@ -4158,6 +4524,42 @@ def test_get_iam_policy_rest(request_type): assert response.version == 774 +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=compute.GetIamPolicyBackendServiceRequest, ): @@ -4716,6 +5118,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertBackendServiceRequest): transport_class = transports.BackendServicesRestTransport @@ -5251,6 +5693,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertBackendServiceRequest, ): @@ -5539,6 +6021,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListBackendServicesRequest): transport_class = transports.BackendServicesRestTransport @@ -5886,6 +6404,42 @@ def test_list_usable_rest(request_type): assert response.self_link == "self_link_value" +def test_list_usable_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_usable in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_usable] = mock_rpc + + request = {} + client.list_usable(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_usable(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_usable_rest_required_fields( request_type=compute.ListUsableBackendServicesRequest, ): @@ -6506,6 +7060,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchBackendServiceRequest): transport_class = transports.BackendServicesRestTransport @@ -7048,6 +7642,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchBackendServiceRequest, ): @@ -7457,6 +8091,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_edge_security_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_edge_security_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_edge_security_policy + ] = mock_rpc + + request = {} + client.set_edge_security_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_edge_security_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_edge_security_policy_rest_required_fields( request_type=compute.SetEdgeSecurityPolicyBackendServiceRequest, ): @@ -7845,6 +8524,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_edge_security_policy_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_edge_security_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_edge_security_policy + ] = mock_rpc + + request = {} + client.set_edge_security_policy_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_edge_security_policy_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_edge_security_policy_unary_rest_required_fields( request_type=compute.SetEdgeSecurityPolicyBackendServiceRequest, ): @@ -8291,6 +9015,42 @@ def get_message_fields(field): assert response.version == 774 +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=compute.SetIamPolicyBackendServiceRequest, ): @@ -8698,6 +9458,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_security_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_security_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_security_policy + ] = mock_rpc + + request = {} + client.set_security_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_security_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_security_policy_rest_required_fields( request_type=compute.SetSecurityPolicyBackendServiceRequest, ): @@ -9085,6 +9889,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_security_policy_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_security_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_security_policy + ] = mock_rpc + + request = {} + client.set_security_policy_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_security_policy_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_security_policy_unary_rest_required_fields( request_type=compute.SetSecurityPolicyBackendServiceRequest, ): @@ -9452,6 +10300,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsBackendServiceRequest, ): @@ -10019,6 +10907,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_update_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_rest_required_fields(request_type=compute.UpdateBackendServiceRequest): transport_class = transports.BackendServicesRestTransport @@ -10561,6 +11489,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_update_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_unary_rest_required_fields( request_type=compute.UpdateBackendServiceRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disk_types.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disk_types.py index 089d33407e55..db2b9a2609c2 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disk_types.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disk_types.py @@ -971,6 +971,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListDiskTypesRequest, ): @@ -1343,6 +1379,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetDiskTypeRequest): transport_class = transports.DiskTypesRestTransport @@ -1633,6 +1705,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListDiskTypesRequest): transport_class = transports.DiskTypesRestTransport diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disks.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disks.py index 996af6afc0b9..604b189a445e 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disks.py @@ -1074,6 +1074,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_add_resource_policies_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.add_resource_policies + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.add_resource_policies + ] = mock_rpc + + request = {} + client.add_resource_policies(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_resource_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_resource_policies_rest_required_fields( request_type=compute.AddResourcePoliciesDiskRequest, ): @@ -1473,6 +1518,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_add_resource_policies_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.add_resource_policies + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.add_resource_policies + ] = mock_rpc + + request = {} + client.add_resource_policies_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_resource_policies_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_resource_policies_unary_rest_required_fields( request_type=compute.AddResourcePoliciesDiskRequest, ): @@ -1775,6 +1865,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListDisksRequest, ): @@ -2249,6 +2375,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_bulk_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.bulk_insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.bulk_insert] = mock_rpc + + request = {} + client.bulk_insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.bulk_insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_bulk_insert_rest_required_fields(request_type=compute.BulkInsertDiskRequest): transport_class = transports.DisksRestTransport @@ -2630,6 +2796,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_bulk_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.bulk_insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.bulk_insert] = mock_rpc + + request = {} + client.bulk_insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.bulk_insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_bulk_insert_unary_rest_required_fields( request_type=compute.BulkInsertDiskRequest, ): @@ -3068,6 +3274,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_create_snapshot_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_snapshot] = mock_rpc + + request = {} + client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_create_snapshot_rest_required_fields( request_type=compute.CreateSnapshotDiskRequest, ): @@ -3499,6 +3745,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_create_snapshot_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_snapshot] = mock_rpc + + request = {} + client.create_snapshot_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_snapshot_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_create_snapshot_unary_rest_required_fields( request_type=compute.CreateSnapshotDiskRequest, ): @@ -3841,6 +4127,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteDiskRequest): transport_class = transports.DisksRestTransport @@ -4143,6 +4469,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields(request_type=compute.DeleteDiskRequest): transport_class = transports.DisksRestTransport @@ -4442,6 +4808,7 @@ def test_get_rest(request_type): source_snapshot_id="source_snapshot_id_value", source_storage_object="source_storage_object_value", status="status_value", + storage_pool="storage_pool_value", type_="type__value", users=["users_value"], zone="zone_value", @@ -4502,11 +4869,48 @@ def test_get_rest(request_type): assert response.source_snapshot_id == "source_snapshot_id_value" assert response.source_storage_object == "source_storage_object_value" assert response.status == "status_value" + assert response.storage_pool == "storage_pool_value" assert response.type_ == "type__value" assert response.users == ["users_value"] assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetDiskRequest): transport_class = transports.DisksRestTransport @@ -4791,6 +5195,42 @@ def test_get_iam_policy_rest(request_type): assert response.version == 774 +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=compute.GetIamPolicyDiskRequest, ): @@ -5116,6 +5556,7 @@ def test_insert_rest(request_type): "source_snapshot_id": "source_snapshot_id_value", "source_storage_object": "source_storage_object_value", "status": "status_value", + "storage_pool": "storage_pool_value", "type_": "type__value", "users": ["users_value1", "users_value2"], "zone": "zone_value", @@ -5254,6 +5695,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertDiskRequest): transport_class = transports.DisksRestTransport @@ -5578,6 +6059,7 @@ def test_insert_unary_rest(request_type): "source_snapshot_id": "source_snapshot_id_value", "source_storage_object": "source_storage_object_value", "status": "status_value", + "storage_pool": "storage_pool_value", "type_": "type__value", "users": ["users_value1", "users_value2"], "zone": "zone_value", @@ -5694,6 +6176,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields(request_type=compute.InsertDiskRequest): transport_class = transports.DisksRestTransport @@ -5989,6 +6511,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListDisksRequest): transport_class = transports.DisksRestTransport @@ -6462,6 +7020,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_remove_resource_policies_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.remove_resource_policies + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.remove_resource_policies + ] = mock_rpc + + request = {} + client.remove_resource_policies(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_resource_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_resource_policies_rest_required_fields( request_type=compute.RemoveResourcePoliciesDiskRequest, ): @@ -6861,6 +7464,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_remove_resource_policies_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.remove_resource_policies + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.remove_resource_policies + ] = mock_rpc + + request = {} + client.remove_resource_policies_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_resource_policies_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_resource_policies_unary_rest_required_fields( request_type=compute.RemoveResourcePoliciesDiskRequest, ): @@ -7271,6 +7919,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_resize_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.resize in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.resize] = mock_rpc + + request = {} + client.resize(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.resize(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_resize_rest_required_fields(request_type=compute.ResizeDiskRequest): transport_class = transports.DisksRestTransport @@ -7651,6 +8339,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_resize_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.resize in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.resize] = mock_rpc + + request = {} + client.resize_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.resize_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_resize_unary_rest_required_fields(request_type=compute.ResizeDiskRequest): transport_class = transports.DisksRestTransport @@ -8093,6 +8821,42 @@ def get_message_fields(field): assert response.version == 774 +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=compute.SetIamPolicyDiskRequest, ): @@ -8510,6 +9274,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_labels_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_rest_required_fields(request_type=compute.SetLabelsDiskRequest): transport_class = transports.DisksRestTransport @@ -8903,6 +9707,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_labels_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_unary_rest_required_fields( request_type=compute.SetLabelsDiskRequest, ): @@ -9326,6 +10170,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_start_async_replication_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.start_async_replication + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.start_async_replication + ] = mock_rpc + + request = {} + client.start_async_replication(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.start_async_replication(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_start_async_replication_rest_required_fields( request_type=compute.StartAsyncReplicationDiskRequest, ): @@ -9725,6 +10614,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_start_async_replication_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.start_async_replication + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.start_async_replication + ] = mock_rpc + + request = {} + client.start_async_replication_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.start_async_replication_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_start_async_replication_unary_rest_required_fields( request_type=compute.StartAsyncReplicationDiskRequest, ): @@ -10061,6 +10995,51 @@ def test_stop_async_replication_rest(request_type): assert response.zone == "zone_value" +def test_stop_async_replication_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.stop_async_replication + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.stop_async_replication + ] = mock_rpc + + request = {} + client.stop_async_replication(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.stop_async_replication(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_stop_async_replication_rest_required_fields( request_type=compute.StopAsyncReplicationDiskRequest, ): @@ -10367,6 +11346,51 @@ def test_stop_async_replication_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_stop_async_replication_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.stop_async_replication + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.stop_async_replication + ] = mock_rpc + + request = {} + client.stop_async_replication_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.stop_async_replication_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_stop_async_replication_unary_rest_required_fields( request_type=compute.StopAsyncReplicationDiskRequest, ): @@ -10780,6 +11804,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_stop_group_async_replication_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.stop_group_async_replication + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.stop_group_async_replication + ] = mock_rpc + + request = {} + client.stop_group_async_replication(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.stop_group_async_replication(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_stop_group_async_replication_rest_required_fields( request_type=compute.StopGroupAsyncReplicationDiskRequest, ): @@ -11172,6 +12241,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_stop_group_async_replication_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.stop_group_async_replication + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.stop_group_async_replication + ] = mock_rpc + + request = {} + client.stop_group_async_replication_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.stop_group_async_replication_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_stop_group_async_replication_unary_rest_required_fields( request_type=compute.StopGroupAsyncReplicationDiskRequest, ): @@ -11539,6 +12653,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsDiskRequest, ): @@ -11872,6 +13026,7 @@ def test_update_rest(request_type): "source_snapshot_id": "source_snapshot_id_value", "source_storage_object": "source_storage_object_value", "status": "status_value", + "storage_pool": "storage_pool_value", "type_": "type__value", "users": ["users_value1", "users_value2"], "zone": "zone_value", @@ -12010,6 +13165,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_update_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_rest_required_fields(request_type=compute.UpdateDiskRequest): transport_class = transports.DisksRestTransport @@ -12343,6 +13538,7 @@ def test_update_unary_rest(request_type): "source_snapshot_id": "source_snapshot_id_value", "source_storage_object": "source_storage_object_value", "status": "status_value", + "storage_pool": "storage_pool_value", "type_": "type__value", "users": ["users_value1", "users_value2"], "zone": "zone_value", @@ -12459,6 +13655,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_update_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_unary_rest_required_fields(request_type=compute.UpdateDiskRequest): transport_class = transports.DisksRestTransport diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_external_vpn_gateways.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_external_vpn_gateways.py index 131857db1106..f3e06ed84250 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_external_vpn_gateways.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_external_vpn_gateways.py @@ -1085,6 +1085,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteExternalVpnGatewayRequest, ): @@ -1386,6 +1426,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteExternalVpnGatewayRequest, ): @@ -1681,6 +1761,42 @@ def test_get_rest(request_type): assert response.self_link == "self_link_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetExternalVpnGatewayRequest): transport_class = transports.ExternalVpnGatewaysRestTransport @@ -2095,6 +2211,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertExternalVpnGatewayRequest, ): @@ -2490,6 +2646,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertExternalVpnGatewayRequest, ): @@ -2780,6 +2976,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListExternalVpnGatewaysRequest): transport_class = transports.ExternalVpnGatewaysRestTransport @@ -3242,6 +3474,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_labels_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_rest_required_fields( request_type=compute.SetLabelsExternalVpnGatewayRequest, ): @@ -3628,6 +3900,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_labels_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_unary_rest_required_fields( request_type=compute.SetLabelsExternalVpnGatewayRequest, ): @@ -3993,6 +4305,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsExternalVpnGatewayRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewall_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewall_policies.py index ad367f8694ff..9cb663379015 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewall_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewall_policies.py @@ -1143,6 +1143,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_add_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_association in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_association] = mock_rpc + + request = {} + client.add_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_association_rest_required_fields( request_type=compute.AddAssociationFirewallPolicyRequest, ): @@ -1539,6 +1579,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_add_association_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_association in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_association] = mock_rpc + + request = {} + client.add_association_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_association_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_association_unary_rest_required_fields( request_type=compute.AddAssociationFirewallPolicyRequest, ): @@ -2001,6 +2081,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_add_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_rule] = mock_rpc + + request = {} + client.add_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_rule_rest_required_fields( request_type=compute.AddRuleFirewallPolicyRequest, ): @@ -2431,6 +2551,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_add_rule_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_rule] = mock_rpc + + request = {} + client.add_rule_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_rule_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_rule_unary_rest_required_fields( request_type=compute.AddRuleFirewallPolicyRequest, ): @@ -2755,6 +2915,46 @@ def test_clone_rules_rest(request_type): assert response.zone == "zone_value" +def test_clone_rules_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.clone_rules in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.clone_rules] = mock_rpc + + request = {} + client.clone_rules(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.clone_rules(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_clone_rules_rest_required_fields( request_type=compute.CloneRulesFirewallPolicyRequest, ): @@ -3055,6 +3255,46 @@ def test_clone_rules_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_clone_rules_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.clone_rules in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.clone_rules] = mock_rpc + + request = {} + client.clone_rules_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.clone_rules_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_clone_rules_unary_rest_required_fields( request_type=compute.CloneRulesFirewallPolicyRequest, ): @@ -3377,6 +3617,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteFirewallPolicyRequest): transport_class = transports.FirewallPoliciesRestTransport @@ -3662,6 +3942,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteFirewallPolicyRequest, ): @@ -3953,6 +4273,42 @@ def test_get_rest(request_type): assert response.short_name == "short_name_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetFirewallPolicyRequest): transport_class = transports.FirewallPoliciesRestTransport @@ -4226,6 +4582,42 @@ def test_get_association_rest(request_type): assert response.short_name == "short_name_value" +def test_get_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_association in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_association] = mock_rpc + + request = {} + client.get_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_association_rest_required_fields( request_type=compute.GetAssociationFirewallPolicyRequest, ): @@ -4499,6 +4891,42 @@ def test_get_iam_policy_rest(request_type): assert response.version == 774 +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=compute.GetIamPolicyFirewallPolicyRequest, ): @@ -4792,6 +5220,42 @@ def test_get_rule_rest(request_type): assert response.tls_inspect is True +def test_get_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_rule] = mock_rpc + + request = {} + client.get_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rule_rest_required_fields( request_type=compute.GetRuleFirewallPolicyRequest, ): @@ -5262,6 +5726,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertFirewallPolicyRequest): transport_class = transports.FirewallPoliciesRestTransport @@ -5746,6 +6250,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertFirewallPolicyRequest, ): @@ -6057,6 +6601,42 @@ def test_list_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_rest_interceptors(null_interceptor): transport = transports.FirewallPoliciesRestTransport( @@ -6239,6 +6819,44 @@ def test_list_associations_rest(request_type): assert response.kind == "kind_value" +def test_list_associations_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_associations in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_associations + ] = mock_rpc + + request = {} + client.list_associations(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_associations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_associations_rest_interceptors(null_interceptor): transport = transports.FirewallPoliciesRestTransport( @@ -6410,6 +7028,46 @@ def test_move_rest(request_type): assert response.zone == "zone_value" +def test_move_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.move in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.move] = mock_rpc + + request = {} + client.move(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.move(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_move_rest_required_fields(request_type=compute.MoveFirewallPolicyRequest): transport_class = transports.FirewallPoliciesRestTransport @@ -6727,6 +7385,46 @@ def test_move_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_move_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.move in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.move] = mock_rpc + + request = {} + client.move_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.move_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_move_unary_rest_required_fields( request_type=compute.MoveFirewallPolicyRequest, ): @@ -7227,6 +7925,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchFirewallPolicyRequest): transport_class = transports.FirewallPoliciesRestTransport @@ -7694,6 +8432,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchFirewallPolicyRequest, ): @@ -8154,6 +8932,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch_rule] = mock_rpc + + request = {} + client.patch_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rule_rest_required_fields( request_type=compute.PatchRuleFirewallPolicyRequest, ): @@ -8594,6 +9412,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_rule_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch_rule] = mock_rpc + + request = {} + client.patch_rule_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_rule_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rule_unary_rest_required_fields( request_type=compute.PatchRuleFirewallPolicyRequest, ): @@ -8928,6 +9786,50 @@ def test_remove_association_rest(request_type): assert response.zone == "zone_value" +def test_remove_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.remove_association in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.remove_association + ] = mock_rpc + + request = {} + client.remove_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_association_rest_required_fields( request_type=compute.RemoveAssociationFirewallPolicyRequest, ): @@ -9228,6 +10130,50 @@ def test_remove_association_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_remove_association_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.remove_association in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.remove_association + ] = mock_rpc + + request = {} + client.remove_association_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_association_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_association_unary_rest_required_fields( request_type=compute.RemoveAssociationFirewallPolicyRequest, ): @@ -9550,6 +10496,46 @@ def test_remove_rule_rest(request_type): assert response.zone == "zone_value" +def test_remove_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.remove_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.remove_rule] = mock_rpc + + request = {} + client.remove_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_rule_rest_required_fields( request_type=compute.RemoveRuleFirewallPolicyRequest, ): @@ -9850,6 +10836,46 @@ def test_remove_rule_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_remove_rule_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.remove_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.remove_rule] = mock_rpc + + request = {} + client.remove_rule_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_rule_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_rule_unary_rest_required_fields( request_type=compute.RemoveRuleFirewallPolicyRequest, ): @@ -10293,6 +11319,42 @@ def get_message_fields(field): assert response.version == 774 +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=compute.SetIamPolicyFirewallPolicyRequest, ): @@ -10651,6 +11713,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsFirewallPolicyRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewalls.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewalls.py index 9dd6a1db8bae..49f18755cb2f 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewalls.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_firewalls.py @@ -1010,6 +1010,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteFirewallRequest): transport_class = transports.FirewallsRestTransport @@ -1305,6 +1345,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields(request_type=compute.DeleteFirewallRequest): transport_class = transports.FirewallsRestTransport @@ -1610,6 +1690,42 @@ def test_get_rest(request_type): assert response.target_tags == ["target_tags_value"] +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetFirewallRequest): transport_class = transports.FirewallsRestTransport @@ -2032,6 +2148,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertFirewallRequest): transport_class = transports.FirewallsRestTransport @@ -2435,6 +2591,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields(request_type=compute.InsertFirewallRequest): transport_class = transports.FirewallsRestTransport @@ -2717,6 +2913,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListFirewallsRequest): transport_class = transports.FirewallsRestTransport @@ -3201,6 +3433,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchFirewallRequest): transport_class = transports.FirewallsRestTransport @@ -3611,6 +3883,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields(request_type=compute.PatchFirewallRequest): transport_class = transports.FirewallsRestTransport @@ -4043,6 +4355,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_update_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_rest_required_fields(request_type=compute.UpdateFirewallRequest): transport_class = transports.FirewallsRestTransport @@ -4453,6 +4805,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_update_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_unary_rest_required_fields(request_type=compute.UpdateFirewallRequest): transport_class = transports.FirewallsRestTransport diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_forwarding_rules.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_forwarding_rules.py index 00abe489f29f..ebd1093b7206 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_forwarding_rules.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_forwarding_rules.py @@ -1019,6 +1019,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListForwardingRulesRequest, ): @@ -1426,6 +1462,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteForwardingRuleRequest): transport_class = transports.ForwardingRulesRestTransport @@ -1744,6 +1820,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteForwardingRuleRequest, ): @@ -2036,6 +2152,7 @@ def test_get_rest(request_type): description="description_value", fingerprint="fingerprint_value", id=205, + ip_collection="ip_collection_value", ip_version="ip_version_value", is_mirroring_collector=True, kind="kind_value", @@ -2082,6 +2199,7 @@ def test_get_rest(request_type): assert response.description == "description_value" assert response.fingerprint == "fingerprint_value" assert response.id == 205 + assert response.ip_collection == "ip_collection_value" assert response.ip_version == "ip_version_value" assert response.is_mirroring_collector is True assert response.kind == "kind_value" @@ -2104,6 +2222,42 @@ def test_get_rest(request_type): assert response.target == "target_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetForwardingRuleRequest): transport_class = transports.ForwardingRulesRestTransport @@ -2385,6 +2539,7 @@ def test_insert_rest(request_type): "description": "description_value", "fingerprint": "fingerprint_value", "id": 205, + "ip_collection": "ip_collection_value", "ip_version": "ip_version_value", "is_mirroring_collector": True, "kind": "kind_value", @@ -2558,6 +2713,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertForwardingRuleRequest): transport_class = transports.ForwardingRulesRestTransport @@ -2832,6 +3027,7 @@ def test_insert_unary_rest(request_type): "description": "description_value", "fingerprint": "fingerprint_value", "id": 205, + "ip_collection": "ip_collection_value", "ip_version": "ip_version_value", "is_mirroring_collector": True, "kind": "kind_value", @@ -2983,6 +3179,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertForwardingRuleRequest, ): @@ -3278,6 +3514,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListForwardingRulesRequest): transport_class = transports.ForwardingRulesRestTransport @@ -3621,6 +3893,7 @@ def test_patch_rest(request_type): "description": "description_value", "fingerprint": "fingerprint_value", "id": 205, + "ip_collection": "ip_collection_value", "ip_version": "ip_version_value", "is_mirroring_collector": True, "kind": "kind_value", @@ -3794,6 +4067,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchForwardingRuleRequest): transport_class = transports.ForwardingRulesRestTransport @@ -4087,6 +4400,7 @@ def test_patch_unary_rest(request_type): "description": "description_value", "fingerprint": "fingerprint_value", "id": 205, + "ip_collection": "ip_collection_value", "ip_version": "ip_version_value", "is_mirroring_collector": True, "kind": "kind_value", @@ -4238,6 +4552,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchForwardingRuleRequest, ): @@ -4663,6 +5017,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_labels_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_rest_required_fields( request_type=compute.SetLabelsForwardingRuleRequest, ): @@ -5062,6 +5456,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_labels_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_unary_rest_required_fields( request_type=compute.SetLabelsForwardingRuleRequest, ): @@ -5482,6 +5916,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_target_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_target in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_target] = mock_rpc + + request = {} + client.set_target(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_target(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_target_rest_required_fields( request_type=compute.SetTargetForwardingRuleRequest, ): @@ -5880,6 +6354,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_target_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_target in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_target] = mock_rpc + + request = {} + client.set_target_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_target_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_target_unary_rest_required_fields( request_type=compute.SetTargetForwardingRuleRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_addresses.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_addresses.py index 586eac10d3aa..892822e6f893 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_addresses.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_addresses.py @@ -1053,6 +1053,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteGlobalAddressRequest): transport_class = transports.GlobalAddressesRestTransport @@ -1352,6 +1392,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteGlobalAddressRequest, ): @@ -1669,6 +1749,42 @@ def test_get_rest(request_type): assert response.users == ["users_value"] +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetGlobalAddressRequest): transport_class = transports.GlobalAddressesRestTransport @@ -2077,6 +2193,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertGlobalAddressRequest): transport_class = transports.GlobalAddressesRestTransport @@ -2462,6 +2618,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertGlobalAddressRequest, ): @@ -2746,6 +2942,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListGlobalAddressesRequest): transport_class = transports.GlobalAddressesRestTransport @@ -3209,6 +3441,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_move_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.move in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.move] = mock_rpc + + request = {} + client.move(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.move(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_move_rest_required_fields(request_type=compute.MoveGlobalAddressRequest): transport_class = transports.GlobalAddressesRestTransport @@ -3598,6 +3870,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_move_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.move in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.move] = mock_rpc + + request = {} + client.move_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.move_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_move_unary_rest_required_fields(request_type=compute.MoveGlobalAddressRequest): transport_class = transports.GlobalAddressesRestTransport @@ -4006,6 +4318,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_labels_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_rest_required_fields( request_type=compute.SetLabelsGlobalAddressRequest, ): @@ -4392,6 +4744,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_labels_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_unary_rest_required_fields( request_type=compute.SetLabelsGlobalAddressRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_forwarding_rules.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_forwarding_rules.py index f304f807c811..078b5cbecf66 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_forwarding_rules.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_forwarding_rules.py @@ -1087,6 +1087,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteGlobalForwardingRuleRequest, ): @@ -1388,6 +1428,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteGlobalForwardingRuleRequest, ): @@ -1661,6 +1741,7 @@ def test_get_rest(request_type): description="description_value", fingerprint="fingerprint_value", id=205, + ip_collection="ip_collection_value", ip_version="ip_version_value", is_mirroring_collector=True, kind="kind_value", @@ -1707,6 +1788,7 @@ def test_get_rest(request_type): assert response.description == "description_value" assert response.fingerprint == "fingerprint_value" assert response.id == 205 + assert response.ip_collection == "ip_collection_value" assert response.ip_version == "ip_version_value" assert response.is_mirroring_collector is True assert response.kind == "kind_value" @@ -1729,6 +1811,42 @@ def test_get_rest(request_type): assert response.target == "target_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetGlobalForwardingRuleRequest): transport_class = transports.GlobalForwardingRulesRestTransport @@ -1995,6 +2113,7 @@ def test_insert_rest(request_type): "description": "description_value", "fingerprint": "fingerprint_value", "id": 205, + "ip_collection": "ip_collection_value", "ip_version": "ip_version_value", "is_mirroring_collector": True, "kind": "kind_value", @@ -2168,6 +2287,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertGlobalForwardingRuleRequest, ): @@ -2437,6 +2596,7 @@ def test_insert_unary_rest(request_type): "description": "description_value", "fingerprint": "fingerprint_value", "id": 205, + "ip_collection": "ip_collection_value", "ip_version": "ip_version_value", "is_mirroring_collector": True, "kind": "kind_value", @@ -2588,6 +2748,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertGlobalForwardingRuleRequest, ): @@ -2876,6 +3076,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListGlobalForwardingRulesRequest, ): @@ -3206,6 +3442,7 @@ def test_patch_rest(request_type): "description": "description_value", "fingerprint": "fingerprint_value", "id": 205, + "ip_collection": "ip_collection_value", "ip_version": "ip_version_value", "is_mirroring_collector": True, "kind": "kind_value", @@ -3379,6 +3616,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields( request_type=compute.PatchGlobalForwardingRuleRequest, ): @@ -3655,6 +3932,7 @@ def test_patch_unary_rest(request_type): "description": "description_value", "fingerprint": "fingerprint_value", "id": 205, + "ip_collection": "ip_collection_value", "ip_version": "ip_version_value", "is_mirroring_collector": True, "kind": "kind_value", @@ -3806,6 +4084,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchGlobalForwardingRuleRequest, ): @@ -4216,6 +4534,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_labels_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_rest_required_fields( request_type=compute.SetLabelsGlobalForwardingRuleRequest, ): @@ -4602,6 +4960,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_labels_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_unary_rest_required_fields( request_type=compute.SetLabelsGlobalForwardingRuleRequest, ): @@ -5005,6 +5403,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_target_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_target in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_target] = mock_rpc + + request = {} + client.set_target(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_target(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_target_rest_required_fields( request_type=compute.SetTargetGlobalForwardingRuleRequest, ): @@ -5384,6 +5822,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_target_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_target in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_target] = mock_rpc + + request = {} + client.set_target_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_target_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_target_unary_rest_required_fields( request_type=compute.SetTargetGlobalForwardingRuleRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_network_endpoint_groups.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_network_endpoint_groups.py index e2a5d22e9d4c..3d0260707211 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_network_endpoint_groups.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_network_endpoint_groups.py @@ -1198,6 +1198,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_attach_network_endpoints_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.attach_network_endpoints + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.attach_network_endpoints + ] = mock_rpc + + request = {} + client.attach_network_endpoints(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.attach_network_endpoints(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_attach_network_endpoints_rest_required_fields( request_type=compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest, ): @@ -1609,6 +1654,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_attach_network_endpoints_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.attach_network_endpoints + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.attach_network_endpoints + ] = mock_rpc + + request = {} + client.attach_network_endpoints_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.attach_network_endpoints_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_attach_network_endpoints_unary_rest_required_fields( request_type=compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest, ): @@ -1947,6 +2037,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteGlobalNetworkEndpointGroupRequest, ): @@ -2249,6 +2379,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteGlobalNetworkEndpointGroupRequest, ): @@ -2668,6 +2838,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_detach_network_endpoints_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.detach_network_endpoints + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.detach_network_endpoints + ] = mock_rpc + + request = {} + client.detach_network_endpoints(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.detach_network_endpoints(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_detach_network_endpoints_rest_required_fields( request_type=compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest, ): @@ -3079,6 +3294,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_detach_network_endpoints_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.detach_network_endpoints + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.detach_network_endpoints + ] = mock_rpc + + request = {} + client.detach_network_endpoints_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.detach_network_endpoints_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_detach_network_endpoints_unary_rest_required_fields( request_type=compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest, ): @@ -3401,6 +3661,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields( request_type=compute.GetGlobalNetworkEndpointGroupRequest, ): @@ -3832,6 +4128,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertGlobalNetworkEndpointGroupRequest, ): @@ -4243,6 +4579,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertGlobalNetworkEndpointGroupRequest, ): @@ -4532,6 +4908,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListGlobalNetworkEndpointGroupsRequest, ): @@ -4881,6 +5293,47 @@ def test_list_network_endpoints_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_network_endpoints_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_network_endpoints + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_network_endpoints + ] = mock_rpc + + request = {} + client.list_network_endpoints(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_network_endpoints(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_network_endpoints_rest_required_fields( request_type=compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_operations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_operations.py index 0fda0b1144bf..8cac9c3cfcb9 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_operations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_operations.py @@ -1018,6 +1018,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListGlobalOperationsRequest, ): @@ -1371,6 +1407,42 @@ def test_delete_rest(request_type): assert isinstance(response, compute.DeleteGlobalOperationResponse) +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteGlobalOperationRequest): transport_class = transports.GlobalOperationsRestTransport @@ -1692,6 +1764,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetGlobalOperationRequest): transport_class = transports.GlobalOperationsRestTransport @@ -1975,6 +2083,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListGlobalOperationsRequest): transport_class = transports.GlobalOperationsRestTransport @@ -2358,6 +2502,42 @@ def test_wait_rest(request_type): assert response.zone == "zone_value" +def test_wait_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.wait in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.wait] = mock_rpc + + request = {} + client.wait(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.wait(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_wait_rest_required_fields(request_type=compute.WaitGlobalOperationRequest): transport_class = transports.GlobalOperationsRestTransport diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_organization_operations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_organization_operations.py index 52b29ce0bb68..ccb5d29fa5d0 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_organization_operations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_organization_operations.py @@ -1057,6 +1057,42 @@ def test_delete_rest(request_type): assert isinstance(response, compute.DeleteGlobalOrganizationOperationResponse) +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteGlobalOrganizationOperationRequest, ): @@ -1375,6 +1411,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields( request_type=compute.GetGlobalOrganizationOperationRequest, ): @@ -1648,6 +1720,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_rest_interceptors(null_interceptor): transport = transports.GlobalOrganizationOperationsRestTransport( diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py index 4d6bdf022553..d11c0e265023 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py @@ -1109,6 +1109,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteGlobalPublicDelegatedPrefixeRequest, ): @@ -1411,6 +1451,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteGlobalPublicDelegatedPrefixeRequest, ): @@ -1674,6 +1754,7 @@ def test_get_rest(request_type): with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = compute.PublicDelegatedPrefix( + allocatable_prefix_length=2626, byoip_api_version="byoip_api_version_value", creation_timestamp="creation_timestamp_value", description="description_value", @@ -1682,6 +1763,7 @@ def test_get_rest(request_type): ip_cidr_range="ip_cidr_range_value", is_live_migration=True, kind="kind_value", + mode="mode_value", name="name_value", parent_prefix="parent_prefix_value", region="region_value", @@ -1702,6 +1784,7 @@ def test_get_rest(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, compute.PublicDelegatedPrefix) + assert response.allocatable_prefix_length == 2626 assert response.byoip_api_version == "byoip_api_version_value" assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" @@ -1710,6 +1793,7 @@ def test_get_rest(request_type): assert response.ip_cidr_range == "ip_cidr_range_value" assert response.is_live_migration is True assert response.kind == "kind_value" + assert response.mode == "mode_value" assert response.name == "name_value" assert response.parent_prefix == "parent_prefix_value" assert response.region == "region_value" @@ -1717,6 +1801,42 @@ def test_get_rest(request_type): assert response.status == "status_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields( request_type=compute.GetGlobalPublicDelegatedPrefixeRequest, ): @@ -1974,6 +2094,7 @@ def test_insert_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1"} request_init["public_delegated_prefix_resource"] = { + "allocatable_prefix_length": 2626, "byoip_api_version": "byoip_api_version_value", "creation_timestamp": "creation_timestamp_value", "description": "description_value", @@ -1982,14 +2103,17 @@ def test_insert_rest(request_type): "ip_cidr_range": "ip_cidr_range_value", "is_live_migration": True, "kind": "kind_value", + "mode": "mode_value", "name": "name_value", "parent_prefix": "parent_prefix_value", "public_delegated_sub_prefixs": [ { + "allocatable_prefix_length": 2626, "delegatee_project": "delegatee_project_value", "description": "description_value", "ip_cidr_range": "ip_cidr_range_value", "is_address": True, + "mode": "mode_value", "name": "name_value", "region": "region_value", "status": "status_value", @@ -2141,6 +2265,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = 
( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertGlobalPublicDelegatedPrefixeRequest, ): @@ -2333,7 +2497,7 @@ def test_insert_rest_flattened(): mock_args = dict( project="project_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - byoip_api_version="byoip_api_version_value" + allocatable_prefix_length=2626 ), ) mock_args.update(sample_request) @@ -2373,7 +2537,7 @@ def test_insert_rest_flattened_error(transport: str = "rest"): compute.InsertGlobalPublicDelegatedPrefixeRequest(), project="project_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - byoip_api_version="byoip_api_version_value" + allocatable_prefix_length=2626 ), ) @@ -2400,6 +2564,7 @@ def test_insert_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1"} request_init["public_delegated_prefix_resource"] = { + "allocatable_prefix_length": 2626, "byoip_api_version": "byoip_api_version_value", "creation_timestamp": "creation_timestamp_value", "description": "description_value", @@ -2408,14 +2573,17 @@ def test_insert_unary_rest(request_type): "ip_cidr_range": "ip_cidr_range_value", "is_live_migration": True, "kind": "kind_value", + "mode": "mode_value", "name": "name_value", "parent_prefix": "parent_prefix_value", "public_delegated_sub_prefixs": [ { + "allocatable_prefix_length": 2626, 
"delegatee_project": "delegatee_project_value", "description": "description_value", "ip_cidr_range": "ip_cidr_range_value", "is_address": True, + "mode": "mode_value", "name": "name_value", "region": "region_value", "status": "status_value", @@ -2545,6 +2713,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertGlobalPublicDelegatedPrefixeRequest, ): @@ -2737,7 +2945,7 @@ def test_insert_unary_rest_flattened(): mock_args = dict( project="project_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - byoip_api_version="byoip_api_version_value" + allocatable_prefix_length=2626 ), ) mock_args.update(sample_request) @@ -2777,7 +2985,7 @@ def test_insert_unary_rest_flattened_error(transport: str = "rest"): compute.InsertGlobalPublicDelegatedPrefixeRequest(), project="project_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - byoip_api_version="byoip_api_version_value" + allocatable_prefix_length=2626 ), ) @@ -2834,6 +3042,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListGlobalPublicDelegatedPrefixesRequest, ): @@ -3154,6 +3398,7 @@ def test_patch_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "public_delegated_prefix": "sample2"} request_init["public_delegated_prefix_resource"] = { + "allocatable_prefix_length": 2626, "byoip_api_version": "byoip_api_version_value", "creation_timestamp": "creation_timestamp_value", "description": "description_value", @@ -3162,14 +3407,17 @@ def test_patch_rest(request_type): "ip_cidr_range": "ip_cidr_range_value", "is_live_migration": True, "kind": "kind_value", + "mode": "mode_value", "name": "name_value", "parent_prefix": "parent_prefix_value", "public_delegated_sub_prefixs": [ { + "allocatable_prefix_length": 2626, "delegatee_project": "delegatee_project_value", "description": "description_value", "ip_cidr_range": "ip_cidr_range_value", "is_address": True, + "mode": "mode_value", "name": "name_value", "region": "region_value", "status": "status_value", @@ -3321,6 +3569,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + 
wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields( request_type=compute.PatchGlobalPublicDelegatedPrefixeRequest, ): @@ -3519,7 +3807,7 @@ def test_patch_rest_flattened(): project="project_value", public_delegated_prefix="public_delegated_prefix_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - byoip_api_version="byoip_api_version_value" + allocatable_prefix_length=2626 ), ) mock_args.update(sample_request) @@ -3560,7 +3848,7 @@ def test_patch_rest_flattened_error(transport: str = "rest"): project="project_value", public_delegated_prefix="public_delegated_prefix_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - byoip_api_version="byoip_api_version_value" + allocatable_prefix_length=2626 ), ) @@ -3587,6 +3875,7 @@ def test_patch_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "public_delegated_prefix": "sample2"} request_init["public_delegated_prefix_resource"] = { + "allocatable_prefix_length": 2626, "byoip_api_version": "byoip_api_version_value", "creation_timestamp": "creation_timestamp_value", "description": "description_value", @@ -3595,14 
+3884,17 @@ def test_patch_unary_rest(request_type): "ip_cidr_range": "ip_cidr_range_value", "is_live_migration": True, "kind": "kind_value", + "mode": "mode_value", "name": "name_value", "parent_prefix": "parent_prefix_value", "public_delegated_sub_prefixs": [ { + "allocatable_prefix_length": 2626, "delegatee_project": "delegatee_project_value", "description": "description_value", "ip_cidr_range": "ip_cidr_range_value", "is_address": True, + "mode": "mode_value", "name": "name_value", "region": "region_value", "status": "status_value", @@ -3732,6 +4024,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchGlobalPublicDelegatedPrefixeRequest, ): @@ -3930,7 +4262,7 @@ def test_patch_unary_rest_flattened(): project="project_value", public_delegated_prefix="public_delegated_prefix_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - byoip_api_version="byoip_api_version_value" + allocatable_prefix_length=2626 ), ) mock_args.update(sample_request) @@ -3971,7 +4303,7 @@ def test_patch_unary_rest_flattened_error(transport: str = "rest"): project="project_value", public_delegated_prefix="public_delegated_prefix_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - byoip_api_version="byoip_api_version_value" + allocatable_prefix_length=2626 ), ) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_health_checks.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_health_checks.py index a272751bb8bb..b87702c15b6b 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_health_checks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_health_checks.py @@ -988,6 +988,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all 
calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListHealthChecksRequest, ): @@ -1391,6 +1427,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteHealthCheckRequest): transport_class = transports.HealthChecksRestTransport @@ -1690,6 +1766,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteHealthCheckRequest, ): @@ -1993,6 +2109,42 @@ def test_get_rest(request_type): assert response.unhealthy_threshold == 2046 +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetHealthCheckRequest): transport_class = transports.HealthChecksRestTransport @@ -2443,6 +2595,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertHealthCheckRequest): transport_class = transports.HealthChecksRestTransport @@ -2872,6 +3064,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertHealthCheckRequest, ): @@ -3156,6 +3388,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListHealthChecksRequest): transport_class = transports.HealthChecksRestTransport @@ -3672,6 +3940,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchHealthCheckRequest): transport_class = transports.HealthChecksRestTransport @@ -4108,6 +4416,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields(request_type=compute.PatchHealthCheckRequest): transport_class = transports.HealthChecksRestTransport @@ -4566,6 +4914,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_update_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_rest_required_fields(request_type=compute.UpdateHealthCheckRequest): transport_class = transports.HealthChecksRestTransport @@ -5002,6 +5390,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_update_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_unary_rest_required_fields( request_type=compute.UpdateHealthCheckRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_image_family_views.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_image_family_views.py index 6cb225c03a69..0b7fab1cfc38 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_image_family_views.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_image_family_views.py @@ -1006,6 +1006,42 @@ def test_get_rest(request_type): assert isinstance(response, compute.ImageFamilyView) +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ImageFamilyViewsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetImageFamilyViewRequest): transport_class = transports.ImageFamilyViewsRestTransport diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_images.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_images.py index 89b0b214b495..38c6eba83894 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_images.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_images.py @@ -993,6 +993,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteImageRequest): transport_class = transports.ImagesRestTransport @@ -1288,6 +1328,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields(request_type=compute.DeleteImageRequest): transport_class = transports.ImagesRestTransport @@ -1685,6 +1765,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_deprecate_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.deprecate in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.deprecate] = mock_rpc + + request = {} + client.deprecate(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.deprecate(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_deprecate_rest_required_fields(request_type=compute.DeprecateImageRequest): transport_class = transports.ImagesRestTransport @@ -2068,6 +2188,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_deprecate_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.deprecate in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.deprecate] = mock_rpc + + request = {} + client.deprecate_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.deprecate_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_deprecate_unary_rest_required_fields( request_type=compute.DeprecateImageRequest, ): @@ -2401,6 +2561,42 @@ def test_get_rest(request_type): assert response.storage_locations == ["storage_locations_value"] +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetImageRequest): transport_class = transports.ImagesRestTransport @@ -2722,6 +2918,42 @@ def test_get_from_family_rest(request_type): assert response.storage_locations == ["storage_locations_value"] +def test_get_from_family_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_from_family in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_from_family] = mock_rpc + + request = {} + client.get_from_family(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_from_family(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_from_family_rest_required_fields( request_type=compute.GetFromFamilyImageRequest, ): @@ -3003,6 +3235,42 @@ def test_get_iam_policy_rest(request_type): assert response.version == 774 +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=compute.GetIamPolicyImageRequest, ): @@ -3448,6 +3716,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertImageRequest): transport_class = transports.ImagesRestTransport @@ -3873,6 +4181,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields(request_type=compute.InsertImageRequest): transport_class = transports.ImagesRestTransport @@ -4160,6 +4508,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListImagesRequest): transport_class = transports.ImagesRestTransport @@ -4660,6 +5044,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchImageRequest): transport_class = transports.ImagesRestTransport @@ -5083,6 +5507,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields(request_type=compute.PatchImageRequest): transport_class = transports.ImagesRestTransport @@ -5518,6 +5982,42 @@ def get_message_fields(field): assert response.version == 774 +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=compute.SetIamPolicyImageRequest, ): @@ -5924,6 +6424,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_labels_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_rest_required_fields(request_type=compute.SetLabelsImageRequest): transport_class = transports.ImagesRestTransport @@ -6304,6 +6844,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_labels_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_unary_rest_required_fields( request_type=compute.SetLabelsImageRequest, ): @@ -6665,6 +7245,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsImageRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_manager_resize_requests.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_manager_resize_requests.py new file mode 100644 index 000000000000..47d640263597 --- /dev/null +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_manager_resize_requests.py @@ -0,0 +1,4872 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + path_template, +) +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.compute_v1.services.instance_group_manager_resize_requests import ( + InstanceGroupManagerResizeRequestsClient, + pagers, + transports, +) +from google.cloud.compute_v1.types import compute + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ( + InstanceGroupManagerResizeRequestsClient._get_default_mtls_endpoint(None) + is None + ) + assert ( + InstanceGroupManagerResizeRequestsClient._get_default_mtls_endpoint( + api_endpoint + ) + == api_mtls_endpoint + ) + assert ( + InstanceGroupManagerResizeRequestsClient._get_default_mtls_endpoint( + api_mtls_endpoint + ) + == api_mtls_endpoint + ) + assert ( + InstanceGroupManagerResizeRequestsClient._get_default_mtls_endpoint( + sandbox_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + InstanceGroupManagerResizeRequestsClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + InstanceGroupManagerResizeRequestsClient._get_default_mtls_endpoint( + non_googleapi + ) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert InstanceGroupManagerResizeRequestsClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ( + InstanceGroupManagerResizeRequestsClient._read_environment_variables() + == (True, "auto", None) + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ( + InstanceGroupManagerResizeRequestsClient._read_environment_variables() + == (False, "auto", None) + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + 
InstanceGroupManagerResizeRequestsClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ( + InstanceGroupManagerResizeRequestsClient._read_environment_variables() + == (False, "never", None) + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ( + InstanceGroupManagerResizeRequestsClient._read_environment_variables() + == (False, "always", None) + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ( + InstanceGroupManagerResizeRequestsClient._read_environment_variables() + == (False, "auto", None) + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + InstanceGroupManagerResizeRequestsClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ( + InstanceGroupManagerResizeRequestsClient._read_environment_variables() + == (False, "auto", "foo.com") + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ( + InstanceGroupManagerResizeRequestsClient._get_client_cert_source(None, False) + is None + ) + assert ( + InstanceGroupManagerResizeRequestsClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + InstanceGroupManagerResizeRequestsClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + 
"google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + InstanceGroupManagerResizeRequestsClient._get_client_cert_source( + None, True + ) + is mock_default_cert_source + ) + assert ( + InstanceGroupManagerResizeRequestsClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + InstanceGroupManagerResizeRequestsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(InstanceGroupManagerResizeRequestsClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = InstanceGroupManagerResizeRequestsClient._DEFAULT_UNIVERSE + default_endpoint = ( + InstanceGroupManagerResizeRequestsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = ( + InstanceGroupManagerResizeRequestsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + ) + + assert ( + InstanceGroupManagerResizeRequestsClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + InstanceGroupManagerResizeRequestsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == InstanceGroupManagerResizeRequestsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + InstanceGroupManagerResizeRequestsClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + InstanceGroupManagerResizeRequestsClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == InstanceGroupManagerResizeRequestsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + InstanceGroupManagerResizeRequestsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == InstanceGroupManagerResizeRequestsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + 
InstanceGroupManagerResizeRequestsClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + InstanceGroupManagerResizeRequestsClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + InstanceGroupManagerResizeRequestsClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + InstanceGroupManagerResizeRequestsClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + InstanceGroupManagerResizeRequestsClient._get_universe_domain( + None, universe_domain_env + ) + == universe_domain_env + ) + assert ( + InstanceGroupManagerResizeRequestsClient._get_universe_domain(None, None) + == InstanceGroupManagerResizeRequestsClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + InstanceGroupManagerResizeRequestsClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + InstanceGroupManagerResizeRequestsClient, + transports.InstanceGroupManagerResizeRequestsRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. 
+ assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. 
+ # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (InstanceGroupManagerResizeRequestsClient, "rest"), + ], +) +def test_instance_group_manager_resize_requests_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.InstanceGroupManagerResizeRequestsRestTransport, "rest"), + ], +) +def 
test_instance_group_manager_resize_requests_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (InstanceGroupManagerResizeRequestsClient, "rest"), + ], +) +def test_instance_group_manager_resize_requests_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +def test_instance_group_manager_resize_requests_client_get_transport_class(): + transport = InstanceGroupManagerResizeRequestsClient.get_transport_class() + available_transports = [ + transports.InstanceGroupManagerResizeRequestsRestTransport, + ] + assert transport in available_transports + + transport = 
InstanceGroupManagerResizeRequestsClient.get_transport_class("rest") + assert transport == transports.InstanceGroupManagerResizeRequestsRestTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + InstanceGroupManagerResizeRequestsClient, + transports.InstanceGroupManagerResizeRequestsRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + InstanceGroupManagerResizeRequestsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(InstanceGroupManagerResizeRequestsClient), +) +def test_instance_group_manager_resize_requests_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + InstanceGroupManagerResizeRequestsClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + InstanceGroupManagerResizeRequestsClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + InstanceGroupManagerResizeRequestsClient, + transports.InstanceGroupManagerResizeRequestsRestTransport, + "rest", + "true", + ), + ( + InstanceGroupManagerResizeRequestsClient, + 
transports.InstanceGroupManagerResizeRequestsRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + InstanceGroupManagerResizeRequestsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(InstanceGroupManagerResizeRequestsClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_instance_group_manager_resize_requests_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [InstanceGroupManagerResizeRequestsClient]) +@mock.patch.object( + InstanceGroupManagerResizeRequestsClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(InstanceGroupManagerResizeRequestsClient), +) +def test_instance_group_manager_resize_requests_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [InstanceGroupManagerResizeRequestsClient]) +@mock.patch.object( + InstanceGroupManagerResizeRequestsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(InstanceGroupManagerResizeRequestsClient), +) +def test_instance_group_manager_resize_requests_client_client_api_endpoint( + client_class, +): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = InstanceGroupManagerResizeRequestsClient._DEFAULT_UNIVERSE + default_endpoint = ( + InstanceGroupManagerResizeRequestsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = ( + InstanceGroupManagerResizeRequestsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + InstanceGroupManagerResizeRequestsClient, + transports.InstanceGroupManagerResizeRequestsRestTransport, + "rest", + ), + ], +) +def test_instance_group_manager_resize_requests_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + InstanceGroupManagerResizeRequestsClient, + transports.InstanceGroupManagerResizeRequestsRestTransport, + "rest", + None, + ), + ], +) +def test_instance_group_manager_resize_requests_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.CancelInstanceGroupManagerResizeRequestRequest, + dict, + ], +) +def test_cancel_rest(request_type): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.cancel(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +def test_cancel_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.cancel in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.cancel] = mock_rpc + + request = {} + client.cancel(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.cancel(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_cancel_rest_required_fields( + request_type=compute.CancelInstanceGroupManagerResizeRequestRequest, +): + transport_class = transports.InstanceGroupManagerResizeRequestsRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["resize_request"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = "instance_group_manager_value" + jsonified_request["project"] = "project_value" + jsonified_request["resizeRequest"] = "resize_request_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "resizeRequest" in jsonified_request + assert jsonified_request["resizeRequest"] == "resize_request_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_cancel_rest_unset_required_fields(): + transport = transports.InstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.cancel._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "instanceGroupManager", + "project", + "resizeRequest", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_cancel_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.InstanceGroupManagerResizeRequestsRestInterceptor(), + ) + client = InstanceGroupManagerResizeRequestsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.InstanceGroupManagerResizeRequestsRestInterceptor, "post_cancel" + ) as post, mock.patch.object( + transports.InstanceGroupManagerResizeRequestsRestInterceptor, "pre_cancel" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.CancelInstanceGroupManagerResizeRequestRequest.pb( + 
compute.CancelInstanceGroupManagerResizeRequestRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.CancelInstanceGroupManagerResizeRequestRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.cancel( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_cancel_rest_bad_request( + transport: str = "rest", + request_type=compute.CancelInstanceGroupManagerResizeRequestRequest, +): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel(request) + + +def test_cancel_rest_flattened(): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + instance_group_manager="instance_group_manager_value", + resize_request="resize_request_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.cancel(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/resizeRequests/{resize_request}/cancel" + % client.transport._host, + args[1], + ) + + +def test_cancel_rest_flattened_error(transport: str = "rest"): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.cancel( + compute.CancelInstanceGroupManagerResizeRequestRequest(), + project="project_value", + zone="zone_value", + instance_group_manager="instance_group_manager_value", + resize_request="resize_request_value", + ) + + +def test_cancel_rest_error(): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.CancelInstanceGroupManagerResizeRequestRequest, + dict, + ], +) +def test_cancel_unary_rest(request_type): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.cancel_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_cancel_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.cancel in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.cancel] = mock_rpc + + request = {} + client.cancel_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.cancel_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_cancel_unary_rest_required_fields( + request_type=compute.CancelInstanceGroupManagerResizeRequestRequest, +): + transport_class = transports.InstanceGroupManagerResizeRequestsRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["resize_request"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = "instance_group_manager_value" + jsonified_request["project"] = "project_value" + jsonified_request["resizeRequest"] = "resize_request_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "resizeRequest" in jsonified_request + assert jsonified_request["resizeRequest"] == "resize_request_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_cancel_unary_rest_unset_required_fields(): + transport = transports.InstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.cancel._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "instanceGroupManager", + "project", + "resizeRequest", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_cancel_unary_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.InstanceGroupManagerResizeRequestsRestInterceptor(), + ) + client = InstanceGroupManagerResizeRequestsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.InstanceGroupManagerResizeRequestsRestInterceptor, "post_cancel" + ) as post, mock.patch.object( + transports.InstanceGroupManagerResizeRequestsRestInterceptor, "pre_cancel" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.CancelInstanceGroupManagerResizeRequestRequest.pb( + compute.CancelInstanceGroupManagerResizeRequestRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.CancelInstanceGroupManagerResizeRequestRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.cancel_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_cancel_unary_rest_bad_request( + transport: str = "rest", + request_type=compute.CancelInstanceGroupManagerResizeRequestRequest, +): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_unary(request) + + +def test_cancel_unary_rest_flattened(): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + instance_group_manager="instance_group_manager_value", + resize_request="resize_request_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.cancel_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/resizeRequests/{resize_request}/cancel" + % client.transport._host, + args[1], + ) + + +def test_cancel_unary_rest_flattened_error(transport: str = "rest"): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.cancel_unary( + compute.CancelInstanceGroupManagerResizeRequestRequest(), + project="project_value", + zone="zone_value", + instance_group_manager="instance_group_manager_value", + resize_request="resize_request_value", + ) + + +def test_cancel_unary_rest_error(): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.DeleteInstanceGroupManagerResizeRequestRequest, + dict, + ], +) +def test_delete_rest(request_type): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_rest_required_fields( + request_type=compute.DeleteInstanceGroupManagerResizeRequestRequest, +): + transport_class = transports.InstanceGroupManagerResizeRequestsRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["resize_request"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = "instance_group_manager_value" + jsonified_request["project"] = "project_value" + jsonified_request["resizeRequest"] = "resize_request_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "resizeRequest" in jsonified_request + assert jsonified_request["resizeRequest"] == "resize_request_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.InstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "instanceGroupManager", + "project", + "resizeRequest", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.InstanceGroupManagerResizeRequestsRestInterceptor(), + ) + client = InstanceGroupManagerResizeRequestsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.InstanceGroupManagerResizeRequestsRestInterceptor, "post_delete" + ) as post, mock.patch.object( + transports.InstanceGroupManagerResizeRequestsRestInterceptor, "pre_delete" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteInstanceGroupManagerResizeRequestRequest.pb( + 
compute.DeleteInstanceGroupManagerResizeRequestRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteInstanceGroupManagerResizeRequestRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request( + transport: str = "rest", + request_type=compute.DeleteInstanceGroupManagerResizeRequestRequest, +): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + instance_group_manager="instance_group_manager_value", + resize_request="resize_request_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/resizeRequests/{resize_request}" + % client.transport._host, + args[1], + ) + + +def test_delete_rest_flattened_error(transport: str = "rest"): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteInstanceGroupManagerResizeRequestRequest(), + project="project_value", + zone="zone_value", + instance_group_manager="instance_group_manager_value", + resize_request="resize_request_value", + ) + + +def test_delete_rest_error(): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.DeleteInstanceGroupManagerResizeRequestRequest, + dict, + ], +) +def test_delete_unary_rest(request_type): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_unary_rest_required_fields( + request_type=compute.DeleteInstanceGroupManagerResizeRequestRequest, +): + transport_class = transports.InstanceGroupManagerResizeRequestsRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["resize_request"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = "instance_group_manager_value" + jsonified_request["project"] = "project_value" + jsonified_request["resizeRequest"] = "resize_request_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "resizeRequest" in jsonified_request + assert jsonified_request["resizeRequest"] == "resize_request_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.InstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "instanceGroupManager", + "project", + "resizeRequest", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.InstanceGroupManagerResizeRequestsRestInterceptor(), + ) + client = InstanceGroupManagerResizeRequestsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.InstanceGroupManagerResizeRequestsRestInterceptor, "post_delete" + ) as post, mock.patch.object( + transports.InstanceGroupManagerResizeRequestsRestInterceptor, "pre_delete" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.DeleteInstanceGroupManagerResizeRequestRequest.pb( + compute.DeleteInstanceGroupManagerResizeRequestRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteInstanceGroupManagerResizeRequestRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request( + transport: str = "rest", + request_type=compute.DeleteInstanceGroupManagerResizeRequestRequest, +): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + instance_group_manager="instance_group_manager_value", + resize_request="resize_request_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/resizeRequests/{resize_request}" + % client.transport._host, + args[1], + ) + + +def test_delete_unary_rest_flattened_error(transport: str = "rest"): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteInstanceGroupManagerResizeRequestRequest(), + project="project_value", + zone="zone_value", + instance_group_manager="instance_group_manager_value", + resize_request="resize_request_value", + ) + + +def test_delete_unary_rest_error(): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.GetInstanceGroupManagerResizeRequestRequest, + dict, + ], +) +def test_get_rest(request_type): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupManagerResizeRequest( + creation_timestamp="creation_timestamp_value", + description="description_value", + id=205, + kind="kind_value", + name="name_value", + resize_by=972, + self_link="self_link_value", + self_link_with_id="self_link_with_id_value", + state="state_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.InstanceGroupManagerResizeRequest.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.InstanceGroupManagerResizeRequest) + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.id == 205 + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.resize_by == 972 + assert response.self_link == "self_link_value" + assert response.self_link_with_id == "self_link_with_id_value" + assert response.state == "state_value" + assert response.zone == "zone_value" + + +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_rest_required_fields( + request_type=compute.GetInstanceGroupManagerResizeRequestRequest, +): + transport_class = transports.InstanceGroupManagerResizeRequestsRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["resize_request"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = "instance_group_manager_value" + jsonified_request["project"] = "project_value" + jsonified_request["resizeRequest"] = "resize_request_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "resizeRequest" in jsonified_request + assert jsonified_request["resizeRequest"] == "resize_request_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = 
InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupManagerResizeRequest() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.InstanceGroupManagerResizeRequest.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.InstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "instanceGroupManager", + "project", + "resizeRequest", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = 
transports.InstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.InstanceGroupManagerResizeRequestsRestInterceptor(), + ) + client = InstanceGroupManagerResizeRequestsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.InstanceGroupManagerResizeRequestsRestInterceptor, "post_get" + ) as post, mock.patch.object( + transports.InstanceGroupManagerResizeRequestsRestInterceptor, "pre_get" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetInstanceGroupManagerResizeRequestRequest.pb( + compute.GetInstanceGroupManagerResizeRequestRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InstanceGroupManagerResizeRequest.to_json( + compute.InstanceGroupManagerResizeRequest() + ) + + request = compute.GetInstanceGroupManagerResizeRequestRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceGroupManagerResizeRequest() + + client.get( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request( + transport: str = "rest", + request_type=compute.GetInstanceGroupManagerResizeRequestRequest, +): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + 
"instance_group_manager": "sample3", + "resize_request": "sample4", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupManagerResizeRequest() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + instance_group_manager="instance_group_manager_value", + resize_request="resize_request_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.InstanceGroupManagerResizeRequest.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/resizeRequests/{resize_request}" + % client.transport._host, + args[1], + ) + + +def test_get_rest_flattened_error(transport: str = "rest"): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get( + compute.GetInstanceGroupManagerResizeRequestRequest(), + project="project_value", + zone="zone_value", + instance_group_manager="instance_group_manager_value", + resize_request="resize_request_value", + ) + + +def test_get_rest_error(): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.InsertInstanceGroupManagerResizeRequestRequest, + dict, + ], +) +def test_insert_rest(request_type): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init["instance_group_manager_resize_request_resource"] = { + "creation_timestamp": "creation_timestamp_value", + "description": "description_value", + "id": 205, + "kind": "kind_value", + "name": "name_value", + "requested_run_duration": {"nanos": 543, "seconds": 751}, + "resize_by": 972, + "self_link": "self_link_value", + "self_link_with_id": "self_link_with_id_value", + "state": "state_value", + "status": { + "error": { + "errors": [ + { + "code": "code_value", + "error_details": [ + { + "error_info": { + "domain": "domain_value", + "metadatas": 
{}, + "reason": "reason_value", + }, + "help_": { + "links": [ + { + "description": "description_value", + "url": "url_value", + } + ] + }, + "localized_message": { + "locale": "locale_value", + "message": "message_value", + }, + "quota_info": { + "dimensions": {}, + "future_limit": 0.1305, + "limit": 0.543, + "limit_name": "limit_name_value", + "metric_name": "metric_name_value", + "rollout_status": "rollout_status_value", + }, + } + ], + "location": "location_value", + "message": "message_value", + } + ] + }, + "last_attempt": {"error": {}}, + }, + "zone": "zone_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertInstanceGroupManagerResizeRequestRequest.meta.fields[ + "instance_group_manager_resize_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_manager_resize_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + 
request_init["instance_group_manager_resize_request_resource"][ + field + ] + ), + ): + del request_init["instance_group_manager_resize_request_resource"][ + field + ][i][subfield] + else: + del request_init["instance_group_manager_resize_request_resource"][ + field + ][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_insert_rest_required_fields( + request_type=compute.InsertInstanceGroupManagerResizeRequestRequest, +): + transport_class = transports.InstanceGroupManagerResizeRequestsRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = "instance_group_manager_value" + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.insert(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.InstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "instanceGroupManager", + "instanceGroupManagerResizeRequestResource", + "project", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.InstanceGroupManagerResizeRequestsRestInterceptor(), + ) + client = InstanceGroupManagerResizeRequestsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.InstanceGroupManagerResizeRequestsRestInterceptor, "post_insert" + ) as post, mock.patch.object( + transports.InstanceGroupManagerResizeRequestsRestInterceptor, "pre_insert" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
compute.InsertInstanceGroupManagerResizeRequestRequest.pb( + compute.InsertInstanceGroupManagerResizeRequestRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertInstanceGroupManagerResizeRequestRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request( + transport: str = "rest", + request_type=compute.InsertInstanceGroupManagerResizeRequestRequest, +): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + instance_group_manager="instance_group_manager_value", + instance_group_manager_resize_request_resource=compute.InstanceGroupManagerResizeRequest( + creation_timestamp="creation_timestamp_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/resizeRequests" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertInstanceGroupManagerResizeRequestRequest(), + project="project_value", + zone="zone_value", + instance_group_manager="instance_group_manager_value", + instance_group_manager_resize_request_resource=compute.InstanceGroupManagerResizeRequest( + creation_timestamp="creation_timestamp_value" + ), + ) + + +def test_insert_rest_error(): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.InsertInstanceGroupManagerResizeRequestRequest, + dict, + ], +) +def test_insert_unary_rest(request_type): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init["instance_group_manager_resize_request_resource"] = { + "creation_timestamp": "creation_timestamp_value", + "description": "description_value", + "id": 205, + "kind": "kind_value", + "name": "name_value", + "requested_run_duration": {"nanos": 543, "seconds": 751}, + "resize_by": 972, + "self_link": "self_link_value", + "self_link_with_id": "self_link_with_id_value", + "state": "state_value", + "status": { + "error": { + "errors": [ + { + "code": "code_value", + "error_details": [ + { + "error_info": { + "domain": "domain_value", + "metadatas": {}, + "reason": "reason_value", + }, + "help_": { + "links": [ + { + "description": "description_value", + "url": "url_value", + } + ] + }, + "localized_message": { + "locale": "locale_value", + "message": "message_value", + }, + "quota_info": { + "dimensions": {}, + "future_limit": 0.1305, + "limit": 0.543, + "limit_name": "limit_name_value", + "metric_name": "metric_name_value", + "rollout_status": "rollout_status_value", + }, + } + ], + 
"location": "location_value", + "message": "message_value", + } + ] + }, + "last_attempt": {"error": {}}, + }, + "zone": "zone_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertInstanceGroupManagerResizeRequestRequest.meta.fields[ + "instance_group_manager_resize_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_manager_resize_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = 
value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["instance_group_manager_resize_request_resource"][ + field + ] + ), + ): + del request_init["instance_group_manager_resize_request_resource"][ + field + ][i][subfield] + else: + del request_init["instance_group_manager_resize_request_resource"][ + field + ][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.insert_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_insert_unary_rest_required_fields( + request_type=compute.InsertInstanceGroupManagerResizeRequestRequest, +): + transport_class = transports.InstanceGroupManagerResizeRequestsRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = "instance_group_manager_value" + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.InstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "instanceGroupManager", + "instanceGroupManagerResizeRequestResource", + "project", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.InstanceGroupManagerResizeRequestsRestInterceptor(), + ) + client = InstanceGroupManagerResizeRequestsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.InstanceGroupManagerResizeRequestsRestInterceptor, "post_insert" + ) as post, mock.patch.object( + transports.InstanceGroupManagerResizeRequestsRestInterceptor, "pre_insert" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = compute.InsertInstanceGroupManagerResizeRequestRequest.pb( + compute.InsertInstanceGroupManagerResizeRequestRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertInstanceGroupManagerResizeRequestRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request( + transport: str = "rest", + request_type=compute.InsertInstanceGroupManagerResizeRequestRequest, +): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + instance_group_manager="instance_group_manager_value", + instance_group_manager_resize_request_resource=compute.InstanceGroupManagerResizeRequest( + creation_timestamp="creation_timestamp_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/resizeRequests" + % client.transport._host, + args[1], + ) + + +def test_insert_unary_rest_flattened_error(transport: str = "rest"): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertInstanceGroupManagerResizeRequestRequest(), + project="project_value", + zone="zone_value", + instance_group_manager="instance_group_manager_value", + instance_group_manager_resize_request_resource=compute.InstanceGroupManagerResizeRequest( + creation_timestamp="creation_timestamp_value" + ), + ) + + +def test_insert_unary_rest_error(): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.ListInstanceGroupManagerResizeRequestsRequest, + dict, + ], +) +def test_list_rest(request_type): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupManagerResizeRequestsListResponse( + id="id_value", + kind="kind_value", + next_page_token="next_page_token_value", + self_link="self_link_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.InstanceGroupManagerResizeRequestsListResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == "id_value" + assert response.kind == "kind_value" + assert response.next_page_token == "next_page_token_value" + assert response.self_link == "self_link_value" + + +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_rest_required_fields( + request_type=compute.ListInstanceGroupManagerResizeRequestsRequest, +): + transport_class = transports.InstanceGroupManagerResizeRequestsRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = "instance_group_manager_value" + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "max_results", + "order_by", + "page_token", + "return_partial_success", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupManagerResizeRequestsListResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.InstanceGroupManagerResizeRequestsListResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.InstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "maxResults", + "orderBy", + "pageToken", + "returnPartialSuccess", + ) + ) + & set( + ( + "instanceGroupManager", + "project", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.InstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.InstanceGroupManagerResizeRequestsRestInterceptor(), + ) + client = InstanceGroupManagerResizeRequestsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.InstanceGroupManagerResizeRequestsRestInterceptor, "post_list" + ) as post, mock.patch.object( + transports.InstanceGroupManagerResizeRequestsRestInterceptor, "pre_list" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = compute.ListInstanceGroupManagerResizeRequestsRequest.pb( + compute.ListInstanceGroupManagerResizeRequestsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + compute.InstanceGroupManagerResizeRequestsListResponse.to_json( + compute.InstanceGroupManagerResizeRequestsListResponse() + ) + ) + + request = compute.ListInstanceGroupManagerResizeRequestsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceGroupManagerResizeRequestsListResponse() + + client.list( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request( + transport: str = "rest", + request_type=compute.ListInstanceGroupManagerResizeRequestsRequest, +): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupManagerResizeRequestsListResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + instance_group_manager="instance_group_manager_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.InstanceGroupManagerResizeRequestsListResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/resizeRequests" + % client.transport._host, + args[1], + ) + + +def test_list_rest_flattened_error(transport: str = "rest"): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListInstanceGroupManagerResizeRequestsRequest(), + project="project_value", + zone="zone_value", + instance_group_manager="instance_group_manager_value", + ) + + +def test_list_rest_pager(transport: str = "rest"): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.InstanceGroupManagerResizeRequestsListResponse( + items=[ + compute.InstanceGroupManagerResizeRequest(), + compute.InstanceGroupManagerResizeRequest(), + compute.InstanceGroupManagerResizeRequest(), + ], + next_page_token="abc", + ), + compute.InstanceGroupManagerResizeRequestsListResponse( + items=[], + next_page_token="def", + ), + compute.InstanceGroupManagerResizeRequestsListResponse( + items=[ + compute.InstanceGroupManagerResizeRequest(), + ], + next_page_token="ghi", + ), + compute.InstanceGroupManagerResizeRequestsListResponse( + items=[ + compute.InstanceGroupManagerResizeRequest(), + compute.InstanceGroupManagerResizeRequest(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + compute.InstanceGroupManagerResizeRequestsListResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, compute.InstanceGroupManagerResizeRequest) for i in results + ) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.InstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.InstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InstanceGroupManagerResizeRequestsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.InstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = InstanceGroupManagerResizeRequestsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = InstanceGroupManagerResizeRequestsClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.InstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InstanceGroupManagerResizeRequestsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.InstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = InstanceGroupManagerResizeRequestsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.InstanceGroupManagerResizeRequestsRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = InstanceGroupManagerResizeRequestsClient.get_transport_class( + transport_name + )( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_instance_group_manager_resize_requests_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.InstanceGroupManagerResizeRequestsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_instance_group_manager_resize_requests_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.compute_v1.services.instance_group_manager_resize_requests.transports.InstanceGroupManagerResizeRequestsTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.InstanceGroupManagerResizeRequestsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "cancel", + "delete", + "get", + "insert", + "list", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_instance_group_manager_resize_requests_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.compute_v1.services.instance_group_manager_resize_requests.transports.InstanceGroupManagerResizeRequestsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InstanceGroupManagerResizeRequestsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +def test_instance_group_manager_resize_requests_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.compute_v1.services.instance_group_manager_resize_requests.transports.InstanceGroupManagerResizeRequestsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InstanceGroupManagerResizeRequestsTransport() + adc.assert_called_once() + + +def test_instance_group_manager_resize_requests_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + InstanceGroupManagerResizeRequestsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id=None, + ) + + +def test_instance_group_manager_resize_requests_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.InstanceGroupManagerResizeRequestsRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_instance_group_manager_resize_requests_host_no_port(transport_name): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else 
"https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_instance_group_manager_resize_requests_host_with_port(transport_name): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_instance_group_manager_resize_requests_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = InstanceGroupManagerResizeRequestsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = InstanceGroupManagerResizeRequestsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.cancel._session + session2 = client2.transport.cancel._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = InstanceGroupManagerResizeRequestsClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + 
+def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = InstanceGroupManagerResizeRequestsClient.common_billing_account_path( + **expected + ) + + # Check that the path construction is reversible. + actual = InstanceGroupManagerResizeRequestsClient.parse_common_billing_account_path( + path + ) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = InstanceGroupManagerResizeRequestsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = InstanceGroupManagerResizeRequestsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceGroupManagerResizeRequestsClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = InstanceGroupManagerResizeRequestsClient.common_organization_path( + organization + ) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = InstanceGroupManagerResizeRequestsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = InstanceGroupManagerResizeRequestsClient.parse_common_organization_path( + path + ) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = InstanceGroupManagerResizeRequestsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = InstanceGroupManagerResizeRequestsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceGroupManagerResizeRequestsClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = InstanceGroupManagerResizeRequestsClient.common_location_path( + project, location + ) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = InstanceGroupManagerResizeRequestsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = InstanceGroupManagerResizeRequestsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.InstanceGroupManagerResizeRequestsTransport, "_prep_wrapped_messages" + ) as prep: + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.InstanceGroupManagerResizeRequestsTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = InstanceGroupManagerResizeRequestsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = InstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + InstanceGroupManagerResizeRequestsClient, + transports.InstanceGroupManagerResizeRequestsRestTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_managers.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_managers.py index 587aab7e53b1..aa8dcbea5a05 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_managers.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_managers.py @@ -1176,6 +1176,48 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_abandon_instances_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.abandon_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.abandon_instances + ] = mock_rpc + + request = {} + client.abandon_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.abandon_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_abandon_instances_rest_required_fields( request_type=compute.AbandonInstancesInstanceGroupManagerRequest, ): @@ -1590,6 +1632,48 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_abandon_instances_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.abandon_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # 
operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.abandon_instances + ] = mock_rpc + + request = {} + client.abandon_instances_unary(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.abandon_instances_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_abandon_instances_unary_rest_required_fields( request_type=compute.AbandonInstancesInstanceGroupManagerRequest, ): @@ -1903,6 +1987,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListInstanceGroupManagersRequest, ): @@ -2399,6 +2519,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_apply_updates_to_instances_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.apply_updates_to_instances + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.apply_updates_to_instances + ] = mock_rpc + + request = {} + client.apply_updates_to_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.apply_updates_to_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_apply_updates_to_instances_rest_required_fields( request_type=compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest, ): @@ -2816,6 +2981,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_apply_updates_to_instances_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.apply_updates_to_instances + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.apply_updates_to_instances + ] = mock_rpc + + request = {} + client.apply_updates_to_instances_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.apply_updates_to_instances_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_apply_updates_to_instances_unary_rest_required_fields( request_type=compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest, ): @@ -3264,6 +3474,48 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_create_instances_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_instances + ] = mock_rpc + + request = {} + client.create_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_create_instances_rest_required_fields( request_type=compute.CreateInstancesInstanceGroupManagerRequest, ): @@ -3690,6 +3942,48 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_create_instances_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_instances + ] = mock_rpc + + request = {} + client.create_instances_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instances_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_create_instances_unary_rest_required_fields( request_type=compute.CreateInstancesInstanceGroupManagerRequest, ): @@ -4041,6 +4335,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteInstanceGroupManagerRequest, ): @@ -4361,6 +4695,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteInstanceGroupManagerRequest, ): @@ -4789,6 +5163,48 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_delete_instances_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_instances + ] = mock_rpc + + request = {} + client.delete_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_instances_rest_required_fields( request_type=compute.DeleteInstancesInstanceGroupManagerRequest, ): @@ -5204,6 +5620,48 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_delete_instances_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_instances + ] = mock_rpc + + request = {} + client.delete_instances_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_instances_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_instances_unary_rest_required_fields( request_type=compute.DeleteInstancesInstanceGroupManagerRequest, ): @@ -5642,6 +6100,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_delete_per_instance_configs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_per_instance_configs + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_per_instance_configs + ] = mock_rpc + + request = {} + client.delete_per_instance_configs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_per_instance_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_per_instance_configs_rest_required_fields( request_type=compute.DeletePerInstanceConfigsInstanceGroupManagerRequest, ): @@ -6058,6 +6561,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_delete_per_instance_configs_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_per_instance_configs + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_per_instance_configs + ] = mock_rpc + + request = {} + client.delete_per_instance_configs_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_per_instance_configs_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_per_instance_configs_unary_rest_required_fields( request_type=compute.DeletePerInstanceConfigsInstanceGroupManagerRequest, ): @@ -6400,6 +6948,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetInstanceGroupManagerRequest): transport_class = transports.InstanceGroupManagersRestTransport @@ -6888,6 +7472,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertInstanceGroupManagerRequest, ): @@ -7357,14 +7981,54 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) -def test_insert_unary_rest_required_fields( - request_type=compute.InsertInstanceGroupManagerRequest, -): - transport_class = transports.InstanceGroupManagersRestTransport +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - request_init = {} - request_init["project"] = "" - request_init["zone"] = "" + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_insert_unary_rest_required_fields( + request_type=compute.InsertInstanceGroupManagerRequest, +): + transport_class = transports.InstanceGroupManagersRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -7660,6 +8324,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListInstanceGroupManagersRequest, ): @@ -8018,6 +8718,42 @@ def test_list_errors_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_errors_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_errors in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_errors] = mock_rpc + + request = {} + client.list_errors(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_errors(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_errors_rest_required_fields( request_type=compute.ListErrorsInstanceGroupManagersRequest, ): @@ -8403,6 +9139,47 @@ def test_list_managed_instances_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_managed_instances_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_managed_instances + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_managed_instances + ] = mock_rpc + + request = {} + client.list_managed_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_managed_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_managed_instances_rest_required_fields( request_type=compute.ListManagedInstancesInstanceGroupManagersRequest, ): @@ -8792,6 +9569,47 @@ def test_list_per_instance_configs_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_per_instance_configs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_per_instance_configs + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_per_instance_configs + ] = mock_rpc + + request = {} + client.list_per_instance_configs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_per_instance_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_per_instance_configs_rest_required_fields( request_type=compute.ListPerInstanceConfigsInstanceGroupManagersRequest, ): @@ -9374,6 +10192,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields( request_type=compute.PatchInstanceGroupManagerRequest, ): @@ -9862,6 +10720,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchInstanceGroupManagerRequest, ): @@ -10317,6 +11215,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_per_instance_configs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.patch_per_instance_configs + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.patch_per_instance_configs + ] = mock_rpc + + request = {} + client.patch_per_instance_configs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_per_instance_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_per_instance_configs_rest_required_fields( request_type=compute.PatchPerInstanceConfigsInstanceGroupManagerRequest, ): @@ -10749,6 +11692,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_per_instance_configs_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.patch_per_instance_configs + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.patch_per_instance_configs + ] = mock_rpc + + request = {} + client.patch_per_instance_configs_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_per_instance_configs_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_per_instance_configs_unary_rest_required_fields( request_type=compute.PatchPerInstanceConfigsInstanceGroupManagerRequest, ): @@ -11191,6 +12179,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_recreate_instances_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.recreate_instances in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.recreate_instances + ] = mock_rpc + + request = {} + client.recreate_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.recreate_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_recreate_instances_rest_required_fields( request_type=compute.RecreateInstancesInstanceGroupManagerRequest, ): @@ -11605,6 +12637,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_recreate_instances_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.recreate_instances in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.recreate_instances + ] = mock_rpc + + request = {} + client.recreate_instances_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.recreate_instances_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_recreate_instances_unary_rest_required_fields( request_type=compute.RecreateInstancesInstanceGroupManagerRequest, ): @@ -11956,6 +13032,46 @@ def test_resize_rest(request_type): assert response.zone == "zone_value" +def test_resize_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.resize in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.resize] = mock_rpc + + request = {} + client.resize(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.resize(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_resize_rest_required_fields( request_type=compute.ResizeInstanceGroupManagerRequest, ): @@ -12301,6 +13417,46 @@ def test_resize_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_resize_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.resize in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.resize] = mock_rpc + + request = {} + client.resize_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.resize_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_resize_unary_rest_required_fields( request_type=compute.ResizeInstanceGroupManagerRequest, ): @@ -12753,6 +13909,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_instance_template_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_instance_template + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_instance_template + ] = mock_rpc + + request = {} + client.set_instance_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_instance_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_instance_template_rest_required_fields( request_type=compute.SetInstanceTemplateInstanceGroupManagerRequest, ): @@ -13167,6 +14368,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_instance_template_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_instance_template + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_instance_template + ] = mock_rpc + + request = {} + client.set_instance_template_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_instance_template_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_instance_template_unary_rest_required_fields( request_type=compute.SetInstanceTemplateInstanceGroupManagerRequest, ): @@ -13604,6 +14850,48 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_target_pools_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_target_pools in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_target_pools + ] = mock_rpc + + request = {} + client.set_target_pools(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_target_pools(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_target_pools_rest_required_fields( request_type=compute.SetTargetPoolsInstanceGroupManagerRequest, ): @@ -14019,6 +15307,48 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_target_pools_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_target_pools in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_target_pools + ] = mock_rpc + + request = {} + client.set_target_pools_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_target_pools_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_target_pools_unary_rest_required_fields( request_type=compute.SetTargetPoolsInstanceGroupManagerRequest, ): @@ -14469,6 +15799,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_update_per_instance_configs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_per_instance_configs + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_per_instance_configs + ] = mock_rpc + + request = {} + client.update_per_instance_configs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_per_instance_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_per_instance_configs_rest_required_fields( request_type=compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest, ): @@ -14903,6 +16278,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_update_per_instance_configs_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_per_instance_configs + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_per_instance_configs + ] = mock_rpc + + request = {} + client.update_per_instance_configs_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_per_instance_configs_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_per_instance_configs_unary_rest_required_fields( request_type=compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_groups.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_groups.py index 2eed7ca85596..28282fcc8aa0 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_groups.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_groups.py @@ -1128,6 +1128,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_add_instances_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_instances] = mock_rpc + + request = {} + client.add_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_instances_rest_required_fields( request_type=compute.AddInstancesInstanceGroupRequest, ): @@ -1541,6 +1581,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_add_instances_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_instances] = mock_rpc + + request = {} + client.add_instances_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_instances_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_instances_unary_rest_required_fields( request_type=compute.AddInstancesInstanceGroupRequest, ): @@ -1853,6 +1933,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListInstanceGroupsRequest, ): @@ -2260,6 +2376,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteInstanceGroupRequest): transport_class = transports.InstanceGroupsRestTransport @@ -2578,6 +2734,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteInstanceGroupRequest, ): @@ -2900,6 +3096,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetInstanceGroupRequest): transport_class = transports.InstanceGroupsRestTransport @@ -3322,6 +3554,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertInstanceGroupRequest): transport_class = transports.InstanceGroupsRestTransport @@ -3715,6 +3987,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertInstanceGroupRequest, ): @@ -4010,6 +4322,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListInstanceGroupsRequest): transport_class = transports.InstanceGroupsRestTransport @@ -4457,6 +4805,42 @@ def get_message_fields(field): assert response.self_link == "self_link_value" +def test_list_instances_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc + + request = {} + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_instances_rest_required_fields( request_type=compute.ListInstancesInstanceGroupsRequest, ): @@ -4976,6 +5360,48 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_remove_instances_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.remove_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.remove_instances + ] = mock_rpc + + request = {} + client.remove_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_instances_rest_required_fields( request_type=compute.RemoveInstancesInstanceGroupRequest, ): @@ -5389,6 +5815,48 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_remove_instances_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.remove_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.remove_instances + ] = mock_rpc + + request = {} + client.remove_instances_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_instances_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_instances_unary_rest_required_fields( request_type=compute.RemoveInstancesInstanceGroupRequest, ): @@ -5825,6 +6293,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_named_ports_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_named_ports in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_named_ports] = mock_rpc + + request = {} + client.set_named_ports(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_named_ports(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_named_ports_rest_required_fields( request_type=compute.SetNamedPortsInstanceGroupRequest, ): @@ -6239,6 +6747,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_named_ports_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_named_ports in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_named_ports] = mock_rpc + + request = {} + client.set_named_ports_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_named_ports_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_named_ports_unary_rest_required_fields( request_type=compute.SetNamedPortsInstanceGroupRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_settings_service.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_settings_service.py new file mode 100644 index 000000000000..0a8ff2b9900c --- /dev/null +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_settings_service.py @@ -0,0 +1,2672 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + path_template, +) +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.compute_v1.services.instance_settings_service import ( + InstanceSettingsServiceClient, + transports, +) +from google.cloud.compute_v1.types import compute + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert InstanceSettingsServiceClient._get_default_mtls_endpoint(None) is None + assert ( + InstanceSettingsServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + InstanceSettingsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + InstanceSettingsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + InstanceSettingsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + InstanceSettingsServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert InstanceSettingsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert InstanceSettingsServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert InstanceSettingsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + InstanceSettingsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or 
`false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert InstanceSettingsServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert InstanceSettingsServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert InstanceSettingsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + InstanceSettingsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert InstanceSettingsServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert InstanceSettingsServiceClient._get_client_cert_source(None, False) is None + assert ( + InstanceSettingsServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + InstanceSettingsServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + InstanceSettingsServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + InstanceSettingsServiceClient._get_client_cert_source( + 
mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + InstanceSettingsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(InstanceSettingsServiceClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = InstanceSettingsServiceClient._DEFAULT_UNIVERSE + default_endpoint = InstanceSettingsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = InstanceSettingsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + InstanceSettingsServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + InstanceSettingsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == InstanceSettingsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + InstanceSettingsServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + InstanceSettingsServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == InstanceSettingsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + InstanceSettingsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == InstanceSettingsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + InstanceSettingsServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + InstanceSettingsServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + InstanceSettingsServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any 
universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + InstanceSettingsServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + InstanceSettingsServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + InstanceSettingsServiceClient._get_universe_domain(None, None) + == InstanceSettingsServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + InstanceSettingsServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + InstanceSettingsServiceClient, + transports.InstanceSettingsServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. 
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (InstanceSettingsServiceClient, "rest"), + ], +) +def test_instance_settings_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.InstanceSettingsServiceRestTransport, "rest"), + ], +) +def test_instance_settings_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as 
use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (InstanceSettingsServiceClient, "rest"), + ], +) +def test_instance_settings_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +def test_instance_settings_service_client_get_transport_class(): + transport = InstanceSettingsServiceClient.get_transport_class() + available_transports = [ + transports.InstanceSettingsServiceRestTransport, + ] + assert transport in available_transports + + transport = InstanceSettingsServiceClient.get_transport_class("rest") + assert transport == transports.InstanceSettingsServiceRestTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + InstanceSettingsServiceClient, + transports.InstanceSettingsServiceRestTransport, 
+ "rest", + ), + ], +) +@mock.patch.object( + InstanceSettingsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(InstanceSettingsServiceClient), +) +def test_instance_settings_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(InstanceSettingsServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(InstanceSettingsServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + InstanceSettingsServiceClient, + transports.InstanceSettingsServiceRestTransport, + "rest", + "true", + ), + ( + InstanceSettingsServiceClient, + transports.InstanceSettingsServiceRestTransport, + "rest", + "false", 
+ ), + ], +) +@mock.patch.object( + InstanceSettingsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(InstanceSettingsServiceClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_instance_settings_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [InstanceSettingsServiceClient]) +@mock.patch.object( + InstanceSettingsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(InstanceSettingsServiceClient), +) +def test_instance_settings_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [InstanceSettingsServiceClient]) +@mock.patch.object( + InstanceSettingsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(InstanceSettingsServiceClient), +) +def test_instance_settings_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = InstanceSettingsServiceClient._DEFAULT_UNIVERSE + default_endpoint = InstanceSettingsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = InstanceSettingsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + InstanceSettingsServiceClient, + transports.InstanceSettingsServiceRestTransport, + "rest", + ), + ], +) +def test_instance_settings_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + InstanceSettingsServiceClient, + transports.InstanceSettingsServiceRestTransport, + "rest", + None, + ), + ], +) +def test_instance_settings_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.GetInstanceSettingRequest, + dict, + ], +) +def test_get_rest(request_type): + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstanceSettings( + fingerprint="fingerprint_value", + kind="kind_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.InstanceSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.InstanceSettings) + assert response.fingerprint == "fingerprint_value" + assert response.kind == "kind_value" + assert response.zone == "zone_value" + + +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_rest_required_fields(request_type=compute.GetInstanceSettingRequest): + transport_class = transports.InstanceSettingsServiceRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InstanceSettings() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.InstanceSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.InstanceSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "project", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.InstanceSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.InstanceSettingsServiceRestInterceptor(), + ) + client = InstanceSettingsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.InstanceSettingsServiceRestInterceptor, "post_get" + ) as post, mock.patch.object( + transports.InstanceSettingsServiceRestInterceptor, "pre_get" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetInstanceSettingRequest.pb( + compute.GetInstanceSettingRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.InstanceSettings.to_json( + compute.InstanceSettings() + ) + + request = compute.GetInstanceSettingRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceSettings() + + client.get( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetInstanceSettingRequest +): + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstanceSettings() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.InstanceSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/instanceSettings" + % client.transport._host, + args[1], + ) + + +def test_get_rest_flattened_error(transport: str = "rest"): + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetInstanceSettingRequest(), + project="project_value", + zone="zone_value", + ) + + +def test_get_rest_error(): + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.PatchInstanceSettingRequest, + dict, + ], +) +def test_patch_rest(request_type): + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["instance_settings_resource"] = { + "fingerprint": "fingerprint_value", + "kind": "kind_value", + "metadata": {"items": {}, "kind": "kind_value"}, + "zone": "zone_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchInstanceSettingRequest.meta.fields[ + "instance_settings_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_settings_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["instance_settings_resource"][field]) + ): + del 
request_init["instance_settings_resource"][field][i][subfield] + else: + del request_init["instance_settings_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_patch_rest_required_fields(request_type=compute.PatchInstanceSettingRequest): + transport_class = transports.InstanceSettingsServiceRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.patch(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.InstanceSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "instanceSettingsResource", + "project", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.InstanceSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.InstanceSettingsServiceRestInterceptor(), + ) + client = InstanceSettingsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.InstanceSettingsServiceRestInterceptor, "post_patch" + ) as post, mock.patch.object( + transports.InstanceSettingsServiceRestInterceptor, "pre_patch" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchInstanceSettingRequest.pb( + compute.PatchInstanceSettingRequest() + ) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchInstanceSettingRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchInstanceSettingRequest +): + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + +def test_patch_rest_flattened(): + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + instance_settings_resource=compute.InstanceSettings( + fingerprint="fingerprint_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/instanceSettings" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchInstanceSettingRequest(), + project="project_value", + zone="zone_value", + instance_settings_resource=compute.InstanceSettings( + fingerprint="fingerprint_value" + ), + ) + + +def test_patch_rest_error(): + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.PatchInstanceSettingRequest, + dict, + ], +) +def test_patch_unary_rest(request_type): + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["instance_settings_resource"] = { + "fingerprint": "fingerprint_value", + "kind": "kind_value", + "metadata": {"items": {}, "kind": "kind_value"}, + "zone": "zone_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchInstanceSettingRequest.meta.fields[ + "instance_settings_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_settings_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["instance_settings_resource"][field]) + ): + del 
request_init["instance_settings_resource"][field][i][subfield] + else: + del request_init["instance_settings_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.patch_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_patch_unary_rest_required_fields( + request_type=compute.PatchInstanceSettingRequest, +): + transport_class = transports.InstanceSettingsServiceRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.patch_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.InstanceSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "instanceSettingsResource", + "project", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_unary_rest_interceptors(null_interceptor): + transport = transports.InstanceSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.InstanceSettingsServiceRestInterceptor(), + ) + client = 
InstanceSettingsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.InstanceSettingsServiceRestInterceptor, "post_patch" + ) as post, mock.patch.object( + transports.InstanceSettingsServiceRestInterceptor, "pre_patch" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PatchInstanceSettingRequest.pb( + compute.PatchInstanceSettingRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PatchInstanceSettingRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.patch_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_patch_unary_rest_bad_request( + transport: str = "rest", request_type=compute.PatchInstanceSettingRequest +): + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_unary(request) + + +def test_patch_unary_rest_flattened(): + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + instance_settings_resource=compute.InstanceSettings( + fingerprint="fingerprint_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/instanceSettings" + % client.transport._host, + args[1], + ) + + +def test_patch_unary_rest_flattened_error(transport: str = "rest"): + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.patch_unary( + compute.PatchInstanceSettingRequest(), + project="project_value", + zone="zone_value", + instance_settings_resource=compute.InstanceSettings( + fingerprint="fingerprint_value" + ), + ) + + +def test_patch_unary_rest_error(): + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.InstanceSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.InstanceSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InstanceSettingsServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.InstanceSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = InstanceSettingsServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = InstanceSettingsServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.InstanceSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InstanceSettingsServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.InstanceSettingsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = InstanceSettingsServiceClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.InstanceSettingsServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = InstanceSettingsServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_instance_settings_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.InstanceSettingsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_instance_settings_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.compute_v1.services.instance_settings_service.transports.InstanceSettingsServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.InstanceSettingsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "get", + "patch", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_instance_settings_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.compute_v1.services.instance_settings_service.transports.InstanceSettingsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InstanceSettingsServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +def test_instance_settings_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.compute_v1.services.instance_settings_service.transports.InstanceSettingsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InstanceSettingsServiceTransport() + adc.assert_called_once() + + +def test_instance_settings_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + InstanceSettingsServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id=None, + ) + + +def test_instance_settings_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.InstanceSettingsServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_instance_settings_service_host_no_port(transport_name): + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_instance_settings_service_host_with_port(transport_name): + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def 
test_instance_settings_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = InstanceSettingsServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = InstanceSettingsServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = InstanceSettingsServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = InstanceSettingsServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceSettingsServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = InstanceSettingsServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = InstanceSettingsServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = InstanceSettingsServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = InstanceSettingsServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = InstanceSettingsServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceSettingsServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = InstanceSettingsServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = InstanceSettingsServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = InstanceSettingsServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = InstanceSettingsServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = InstanceSettingsServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = InstanceSettingsServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.InstanceSettingsServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.InstanceSettingsServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = InstanceSettingsServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = InstanceSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + InstanceSettingsServiceClient, + transports.InstanceSettingsServiceRestTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_templates.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_templates.py index 14aa8eede6ba..84d9a02c6064 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_templates.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_templates.py @@ -1031,6 +1031,42 @@ def test_aggregated_list_rest(request_type): assert response.self_link == "self_link_value" +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceTemplatesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListInstanceTemplatesRequest, ): @@ -1434,6 +1470,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteInstanceTemplateRequest, ): @@ -1735,6 +1811,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteInstanceTemplateRequest, ): @@ -2030,6 +2146,42 @@ def test_get_rest(request_type): assert response.source_instance == "source_instance_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetInstanceTemplateRequest): transport_class = transports.InstanceTemplatesRestTransport @@ -2313,6 +2465,42 @@ def test_get_iam_policy_rest(request_type): assert response.version == 774 +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=compute.GetIamPolicyInstanceTemplateRequest, ): @@ -2627,6 +2815,7 @@ def test_insert_rest(request_type): "source_image_encryption_key": {}, "source_snapshot": "source_snapshot_value", "source_snapshot_encryption_key": {}, + "storage_pool": "storage_pool_value", }, "interface": "interface_value", "kind": "kind_value", @@ -2896,6 +3085,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertInstanceTemplateRequest, ): @@ -3211,6 +3440,7 @@ def test_insert_unary_rest(request_type): "source_image_encryption_key": {}, "source_snapshot": "source_snapshot_value", "source_snapshot_encryption_key": {}, + "storage_pool": "storage_pool_value", }, "interface": "interface_value", "kind": "kind_value", @@ -3458,6 +3688,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertInstanceTemplateRequest, ): @@ -3746,6 +4016,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListInstanceTemplatesRequest): transport_class = transports.InstanceTemplatesRestTransport @@ -4243,6 +4549,42 @@ def get_message_fields(field): assert response.version == 774 +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=compute.SetIamPolicyInstanceTemplateRequest, ): @@ -4608,6 +4950,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsInstanceTemplateRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instances.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instances.py index a8360fa8c1ca..44db86a88191 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instances.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instances.py @@ -1093,6 +1093,48 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_add_access_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_access_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.add_access_config + ] = mock_rpc + + request = {} + client.add_access_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_access_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_access_config_rest_required_fields( request_type=compute.AddAccessConfigInstanceRequest, ): @@ -1519,6 +1561,48 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_add_access_config_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_access_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.add_access_config + ] = mock_rpc + + request = {} + client.add_access_config_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_access_config_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_access_config_unary_rest_required_fields( request_type=compute.AddAccessConfigInstanceRequest, ): @@ -1969,6 +2053,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_add_resource_policies_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.add_resource_policies + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.add_resource_policies + ] = mock_rpc + + request = {} + client.add_resource_policies(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_resource_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_resource_policies_rest_required_fields( request_type=compute.AddResourcePoliciesInstanceRequest, ): @@ -2372,6 +2501,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_add_resource_policies_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.add_resource_policies + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.add_resource_policies + ] = mock_rpc + + request = {} + client.add_resource_policies_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_resource_policies_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_resource_policies_unary_rest_required_fields( request_type=compute.AddResourcePoliciesInstanceRequest, ): @@ -2678,6 +2852,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListInstancesRequest, ): @@ -3045,6 +3255,7 @@ def test_attach_disk_rest(request_type): "source_image_encryption_key": {}, "source_snapshot": "source_snapshot_value", "source_snapshot_encryption_key": {}, + "storage_pool": "storage_pool_value", }, "interface": "interface_value", "kind": "kind_value", @@ -3196,6 +3407,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_attach_disk_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.attach_disk in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.attach_disk] = mock_rpc + + request = {} + client.attach_disk(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.attach_disk(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_attach_disk_rest_required_fields( request_type=compute.AttachDiskInstanceRequest, ): @@ -3517,6 +3768,7 @@ def test_attach_disk_unary_rest(request_type): "source_image_encryption_key": {}, "source_snapshot": "source_snapshot_value", "source_snapshot_encryption_key": {}, + "storage_pool": "storage_pool_value", }, "interface": "interface_value", "kind": "kind_value", @@ -3646,6 +3898,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_attach_disk_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.attach_disk in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.attach_disk] = mock_rpc + + request = {} + client.attach_disk_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.attach_disk_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_attach_disk_unary_rest_required_fields( request_type=compute.AttachDiskInstanceRequest, ): @@ -3983,6 +4275,7 @@ def test_bulk_insert_rest(request_type): "source_image_encryption_key": {}, "source_snapshot": "source_snapshot_value", "source_snapshot_encryption_key": {}, + "storage_pool": "storage_pool_value", }, "interface": "interface_value", "kind": "kind_value", @@ -4249,6 +4542,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_bulk_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.bulk_insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.bulk_insert] = mock_rpc + + request = {} + client.bulk_insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.bulk_insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_bulk_insert_rest_required_fields( request_type=compute.BulkInsertInstanceRequest, ): @@ -4565,6 +4898,7 @@ def test_bulk_insert_unary_rest(request_type): "source_image_encryption_key": {}, "source_snapshot": "source_snapshot_value", "source_snapshot_encryption_key": {}, + "storage_pool": "storage_pool_value", }, "interface": "interface_value", "kind": "kind_value", @@ -4809,6 +5143,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_bulk_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.bulk_insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.bulk_insert] = mock_rpc + + request = {} + client.bulk_insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.bulk_insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_bulk_insert_unary_rest_required_fields( request_type=compute.BulkInsertInstanceRequest, ): @@ -5138,6 +5512,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteInstanceRequest): transport_class = transports.InstancesRestTransport @@ -5444,6 +5858,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields(request_type=compute.DeleteInstanceRequest): transport_class = transports.InstancesRestTransport @@ -5772,6 +6226,50 @@ def test_delete_access_config_rest(request_type): assert response.zone == "zone_value" +def test_delete_access_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_access_config in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_access_config + ] = mock_rpc + + request = {} + client.delete_access_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_access_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_access_config_rest_required_fields( request_type=compute.DeleteAccessConfigInstanceRequest, ): @@ -6123,6 +6621,50 @@ def test_delete_access_config_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_access_config_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_access_config in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_access_config + ] = mock_rpc + + request = {} + client.delete_access_config_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_access_config_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_access_config_unary_rest_required_fields( request_type=compute.DeleteAccessConfigInstanceRequest, ): @@ -6496,6 +7038,46 @@ def test_detach_disk_rest(request_type): assert response.zone == "zone_value" +def test_detach_disk_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.detach_disk in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.detach_disk] = mock_rpc + + request = {} + client.detach_disk(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.detach_disk(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_detach_disk_rest_required_fields( request_type=compute.DetachDiskInstanceRequest, ): @@ -6831,6 +7413,46 @@ def test_detach_disk_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_detach_disk_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.detach_disk in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.detach_disk] = mock_rpc + + request = {} + client.detach_disk_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.detach_disk_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_detach_disk_unary_rest_required_fields( request_type=compute.DetachDiskInstanceRequest, ): @@ -7198,6 +7820,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetInstanceRequest): transport_class = transports.InstancesRestTransport @@ -7479,6 +8137,47 @@ def test_get_effective_firewalls_rest(request_type): assert isinstance(response, compute.InstancesGetEffectiveFirewallsResponse) +def test_get_effective_firewalls_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_effective_firewalls + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_effective_firewalls + ] = mock_rpc + + request = {} + client.get_effective_firewalls(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_effective_firewalls(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_effective_firewalls_rest_required_fields( request_type=compute.GetEffectiveFirewallsInstanceRequest, ): @@ -7798,6 +8497,46 @@ def test_get_guest_attributes_rest(request_type): assert response.variable_value == "variable_value_value" +def test_get_guest_attributes_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_guest_attributes in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_guest_attributes + ] = mock_rpc + + request = {} + client.get_guest_attributes(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_guest_attributes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_guest_attributes_rest_required_fields( request_type=compute.GetGuestAttributesInstanceRequest, ): @@ -8104,6 +8843,42 @@ def test_get_iam_policy_rest(request_type): assert response.version == 774 +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=compute.GetIamPolicyInstanceRequest, ): @@ -8396,6 +9171,42 @@ def test_get_screenshot_rest(request_type): assert response.kind == "kind_value" +def test_get_screenshot_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_screenshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_screenshot] = mock_rpc + + request = {} + client.get_screenshot(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_screenshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_screenshot_rest_required_fields( request_type=compute.GetScreenshotInstanceRequest, ): @@ -8692,6 +9503,47 @@ def test_get_serial_port_output_rest(request_type): assert response.start == 558 +def test_get_serial_port_output_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_serial_port_output + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_serial_port_output + ] = mock_rpc + + request = {} + client.get_serial_port_output(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_serial_port_output(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_serial_port_output_rest_required_fields( request_type=compute.GetSerialPortOutputInstanceRequest, ): @@ -8994,6 +9846,47 @@ def test_get_shielded_instance_identity_rest(request_type): assert response.kind == "kind_value" +def test_get_shielded_instance_identity_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_shielded_instance_identity + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_shielded_instance_identity + ] = mock_rpc + + request = {} + client.get_shielded_instance_identity(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_shielded_instance_identity(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_shielded_instance_identity_rest_required_fields( request_type=compute.GetShieldedInstanceIdentityInstanceRequest, ): @@ -9314,6 +10207,7 @@ def test_insert_rest(request_type): "source_image_encryption_key": {}, "source_snapshot": "source_snapshot_value", "source_snapshot_encryption_key": {}, + "storage_pool": "storage_pool_value", }, "interface": "interface_value", "kind": "kind_value", @@ -9589,6 +10483,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertInstanceRequest): transport_class = transports.InstancesRestTransport @@ -9915,6 +10849,7 @@ def test_insert_unary_rest(request_type): "source_image_encryption_key": {}, "source_snapshot": "source_snapshot_value", "source_snapshot_encryption_key": {}, + "storage_pool": "storage_pool_value", }, "interface": "interface_value", "kind": "kind_value", @@ -10168,6 +11103,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields(request_type=compute.InsertInstanceRequest): transport_class = transports.InstancesRestTransport @@ -10473,6 +11448,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListInstancesRequest): transport_class = transports.InstancesRestTransport @@ -10825,6 +11836,42 @@ def test_list_referrers_rest(request_type): assert response.self_link == "self_link_value" +def test_list_referrers_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_referrers in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_referrers] = mock_rpc + + request = {} + client.list_referrers(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_referrers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_referrers_rest_required_fields( request_type=compute.ListReferrersInstancesRequest, ): @@ -11234,6 +12281,50 @@ def test_perform_maintenance_rest(request_type): assert response.zone == "zone_value" +def test_perform_maintenance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.perform_maintenance in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.perform_maintenance + ] = mock_rpc + + request = {} + client.perform_maintenance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.perform_maintenance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_perform_maintenance_rest_required_fields( request_type=compute.PerformMaintenanceInstanceRequest, ): @@ -11544,6 +12635,50 @@ def test_perform_maintenance_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_perform_maintenance_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.perform_maintenance in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.perform_maintenance + ] = mock_rpc + + request = {} + client.perform_maintenance_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.perform_maintenance_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_perform_maintenance_unary_rest_required_fields( request_type=compute.PerformMaintenanceInstanceRequest, ): @@ -11961,6 +13096,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_remove_resource_policies_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.remove_resource_policies + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.remove_resource_policies + ] = mock_rpc + + request = {} + client.remove_resource_policies(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_resource_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_resource_policies_rest_required_fields( request_type=compute.RemoveResourcePoliciesInstanceRequest, ): @@ -12364,6 +13544,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_remove_resource_policies_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.remove_resource_policies + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.remove_resource_policies + ] = mock_rpc + + request = {} + client.remove_resource_policies_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_resource_policies_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_resource_policies_unary_rest_required_fields( request_type=compute.RemoveResourcePoliciesInstanceRequest, ): @@ -12704,6 +13929,46 @@ def test_reset_rest(request_type): assert response.zone == "zone_value" +def test_reset_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.reset in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.reset] = mock_rpc + + request = {} + client.reset(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.reset(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_reset_rest_required_fields(request_type=compute.ResetInstanceRequest): transport_class = transports.InstancesRestTransport @@ -13010,6 +14275,46 @@ def test_reset_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_reset_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.reset in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.reset] = mock_rpc + + request = {} + client.reset_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.reset_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_reset_unary_rest_required_fields(request_type=compute.ResetInstanceRequest): transport_class = transports.InstancesRestTransport @@ -13338,6 +14643,46 @@ def test_resume_rest(request_type): assert response.zone == "zone_value" +def test_resume_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.resume in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.resume] = mock_rpc + + request = {} + client.resume(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.resume(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_resume_rest_required_fields(request_type=compute.ResumeInstanceRequest): transport_class = transports.InstancesRestTransport @@ -13644,6 +14989,46 @@ def test_resume_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_resume_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.resume in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.resume] = mock_rpc + + request = {} + client.resume_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.resume_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_resume_unary_rest_required_fields(request_type=compute.ResumeInstanceRequest): transport_class = transports.InstancesRestTransport @@ -13927,6 +15312,47 @@ def test_send_diagnostic_interrupt_rest(request_type): assert isinstance(response, compute.SendDiagnosticInterruptInstanceResponse) +def test_send_diagnostic_interrupt_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.send_diagnostic_interrupt + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.send_diagnostic_interrupt + ] = mock_rpc + + request = {} + client.send_diagnostic_interrupt(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.send_diagnostic_interrupt(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_send_diagnostic_interrupt_rest_required_fields( request_type=compute.SendDiagnosticInterruptInstanceRequest, ): @@ -14263,6 +15689,51 @@ def test_set_deletion_protection_rest(request_type): assert response.zone == "zone_value" +def test_set_deletion_protection_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_deletion_protection + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_deletion_protection + ] = mock_rpc + + request = {} + client.set_deletion_protection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_deletion_protection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_deletion_protection_rest_required_fields( request_type=compute.SetDeletionProtectionInstanceRequest, ): @@ -14583,6 +16054,51 @@ def test_set_deletion_protection_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_set_deletion_protection_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_deletion_protection + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_deletion_protection + ] = mock_rpc + + request = {} + client.set_deletion_protection_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_deletion_protection_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_deletion_protection_unary_rest_required_fields( request_type=compute.SetDeletionProtectionInstanceRequest, ): @@ -14925,6 +16441,50 @@ def test_set_disk_auto_delete_rest(request_type): assert response.zone == "zone_value" +def test_set_disk_auto_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_disk_auto_delete in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_disk_auto_delete + ] = mock_rpc + + request = {} + client.set_disk_auto_delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_disk_auto_delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_disk_auto_delete_rest_required_fields( request_type=compute.SetDiskAutoDeleteInstanceRequest, ): @@ -15276,6 +16836,50 @@ def test_set_disk_auto_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_set_disk_auto_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_disk_auto_delete in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_disk_auto_delete + ] = mock_rpc + + request = {} + client.set_disk_auto_delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_disk_auto_delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_disk_auto_delete_unary_rest_required_fields( request_type=compute.SetDiskAutoDeleteInstanceRequest, ): @@ -15763,6 +17367,42 @@ def get_message_fields(field): assert response.version == 774 +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=compute.SetIamPolicyInstanceRequest, ): @@ -16182,6 +17822,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_labels_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_rest_required_fields(request_type=compute.SetLabelsInstanceRequest): transport_class = transports.InstancesRestTransport @@ -16579,6 +18259,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_labels_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_unary_rest_required_fields( request_type=compute.SetLabelsInstanceRequest, ): @@ -17006,6 +18726,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_machine_resources_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_machine_resources + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_machine_resources + ] = mock_rpc + + request = {} + client.set_machine_resources(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_machine_resources(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_machine_resources_rest_required_fields( request_type=compute.SetMachineResourcesInstanceRequest, ): @@ -17411,6 +19176,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_machine_resources_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_machine_resources + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_machine_resources + ] = mock_rpc + + request = {} + client.set_machine_resources_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_machine_resources_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_machine_resources_unary_rest_required_fields( request_type=compute.SetMachineResourcesInstanceRequest, ): @@ -17836,6 +19646,48 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_machine_type_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_machine_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_machine_type + ] = mock_rpc + + request = {} + client.set_machine_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_machine_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_machine_type_rest_required_fields( request_type=compute.SetMachineTypeInstanceRequest, ): @@ -18239,6 +20091,48 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_machine_type_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_machine_type in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_machine_type + ] = mock_rpc + + request = {} + client.set_machine_type_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_machine_type_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_machine_type_unary_rest_required_fields( request_type=compute.SetMachineTypeInstanceRequest, ): @@ -18651,6 +20545,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_metadata_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_metadata in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_metadata] = mock_rpc + + request = {} + client.set_metadata(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_metadata_rest_required_fields( request_type=compute.SetMetadataInstanceRequest, ): @@ -19037,6 +20971,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_metadata_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_metadata in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_metadata] = mock_rpc + + request = {} + client.set_metadata_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_metadata_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_metadata_unary_rest_required_fields( request_type=compute.SetMetadataInstanceRequest, ): @@ -19458,6 +21432,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_min_cpu_platform_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_min_cpu_platform in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_min_cpu_platform + ] = mock_rpc + + request = {} + client.set_min_cpu_platform(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_min_cpu_platform(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_min_cpu_platform_rest_required_fields( request_type=compute.SetMinCpuPlatformInstanceRequest, ): @@ -19861,6 +21879,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_min_cpu_platform_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_min_cpu_platform in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_min_cpu_platform + ] = mock_rpc + + request = {} + client.set_min_cpu_platform_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_min_cpu_platform_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_min_cpu_platform_unary_rest_required_fields( request_type=compute.SetMinCpuPlatformInstanceRequest, ): @@ -20280,6 +22342,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_name_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_name in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_name] = mock_rpc + + request = {} + client.set_name(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_name(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_name_rest_required_fields(request_type=compute.SetNameInstanceRequest): transport_class = transports.InstancesRestTransport @@ -20673,6 +22775,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_name_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_name in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_name] = mock_rpc + + request = {} + client.set_name_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_name_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_name_unary_rest_required_fields( request_type=compute.SetNameInstanceRequest, ): @@ -21095,6 +23237,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_scheduling_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_scheduling in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_scheduling] = mock_rpc + + request = {} + client.set_scheduling(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_scheduling(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_scheduling_rest_required_fields( request_type=compute.SetSchedulingInstanceRequest, ): @@ -21493,6 +23675,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_scheduling_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_scheduling in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_scheduling] = mock_rpc + + request = {} + client.set_scheduling_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_scheduling_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_scheduling_unary_rest_required_fields( request_type=compute.SetSchedulingInstanceRequest, ): @@ -21918,6 +24140,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_security_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_security_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_security_policy + ] = mock_rpc + + request = {} + client.set_security_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_security_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_security_policy_rest_required_fields( request_type=compute.SetSecurityPolicyInstanceRequest, ): @@ -22325,6 +24591,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_security_policy_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_security_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_security_policy + ] = mock_rpc + + request = {} + client.set_security_policy_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_security_policy_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_security_policy_unary_rest_required_fields( request_type=compute.SetSecurityPolicyInstanceRequest, ): @@ -22751,6 +25061,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_service_account_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_service_account in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_service_account + ] = mock_rpc + + request = {} + client.set_service_account(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_service_account(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_service_account_rest_required_fields( request_type=compute.SetServiceAccountInstanceRequest, ): @@ -23155,6 +25509,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_service_account_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_service_account in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_service_account + ] = mock_rpc + + request = {} + client.set_service_account_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_service_account_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_service_account_unary_rest_required_fields( request_type=compute.SetServiceAccountInstanceRequest, ): @@ -23580,6 +25978,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_shielded_instance_integrity_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_shielded_instance_integrity_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_shielded_instance_integrity_policy + ] = mock_rpc + + request = {} + client.set_shielded_instance_integrity_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_shielded_instance_integrity_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_shielded_instance_integrity_policy_rest_required_fields( request_type=compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, ): @@ -23994,6 +26437,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_shielded_instance_integrity_policy_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_shielded_instance_integrity_policy + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_shielded_instance_integrity_policy + ] = mock_rpc + + request = {} + client.set_shielded_instance_integrity_policy_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_shielded_instance_integrity_policy_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_shielded_instance_integrity_policy_unary_rest_required_fields( request_type=compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, ): @@ -24418,6 +26906,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_tags_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_tags in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_tags] = mock_rpc + + request = {} + client.set_tags(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_tags(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_tags_rest_required_fields(request_type=compute.SetTagsInstanceRequest): transport_class = transports.InstancesRestTransport @@ -24799,6 +27327,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_tags_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_tags in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_tags] = mock_rpc + + request = {} + client.set_tags_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_tags_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_tags_unary_rest_required_fields( request_type=compute.SetTagsInstanceRequest, ): @@ -25133,6 +27701,51 @@ def test_simulate_maintenance_event_rest(request_type): assert response.zone == "zone_value" +def test_simulate_maintenance_event_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.simulate_maintenance_event + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.simulate_maintenance_event + ] = mock_rpc + + request = {} + client.simulate_maintenance_event(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.simulate_maintenance_event(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_simulate_maintenance_event_rest_required_fields( request_type=compute.SimulateMaintenanceEventInstanceRequest, ): @@ -25454,6 +28067,51 @@ def test_simulate_maintenance_event_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_simulate_maintenance_event_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.simulate_maintenance_event + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.simulate_maintenance_event + ] = mock_rpc + + request = {} + client.simulate_maintenance_event_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.simulate_maintenance_event_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_simulate_maintenance_event_unary_rest_required_fields( request_type=compute.SimulateMaintenanceEventInstanceRequest, ): @@ -25797,6 +28455,46 @@ def test_start_rest(request_type): assert response.zone == "zone_value" +def test_start_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.start in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.start] = mock_rpc + + request = {} + client.start(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.start(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_start_rest_required_fields(request_type=compute.StartInstanceRequest): transport_class = transports.InstancesRestTransport @@ -26103,6 +28801,46 @@ def test_start_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_start_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.start in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.start] = mock_rpc + + request = {} + client.start_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.start_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_start_unary_rest_required_fields(request_type=compute.StartInstanceRequest): transport_class = transports.InstancesRestTransport @@ -26527,6 +29265,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_start_with_encryption_key_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.start_with_encryption_key + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.start_with_encryption_key + ] = mock_rpc + + request = {} + client.start_with_encryption_key(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.start_with_encryption_key(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_start_with_encryption_key_rest_required_fields( request_type=compute.StartWithEncryptionKeyInstanceRequest, ): @@ -26953,6 +29736,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_start_with_encryption_key_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.start_with_encryption_key + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.start_with_encryption_key + ] = mock_rpc + + request = {} + client.start_with_encryption_key_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.start_with_encryption_key_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_start_with_encryption_key_unary_rest_required_fields( request_type=compute.StartWithEncryptionKeyInstanceRequest, ): @@ -27305,6 +30133,46 @@ def test_stop_rest(request_type): assert response.zone == "zone_value" +def test_stop_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.stop in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.stop] = mock_rpc + + request = {} + client.stop(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.stop(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_stop_rest_required_fields(request_type=compute.StopInstanceRequest): transport_class = transports.InstancesRestTransport @@ -27621,6 +30489,46 @@ def test_stop_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_stop_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.stop in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.stop] = mock_rpc + + request = {} + client.stop_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.stop_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_stop_unary_rest_required_fields(request_type=compute.StopInstanceRequest): transport_class = transports.InstancesRestTransport @@ -27959,6 +30867,46 @@ def test_suspend_rest(request_type): assert response.zone == "zone_value" +def test_suspend_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.suspend in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.suspend] = mock_rpc + + request = {} + client.suspend(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.suspend(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_suspend_rest_required_fields(request_type=compute.SuspendInstanceRequest): transport_class = transports.InstancesRestTransport @@ -28275,6 +31223,46 @@ def test_suspend_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_suspend_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.suspend in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.suspend] = mock_rpc + + request = {} + client.suspend_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.suspend_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_suspend_unary_rest_required_fields( request_type=compute.SuspendInstanceRequest, ): @@ -28651,6 +31639,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsInstanceRequest, ): @@ -28976,6 +32004,7 @@ def test_update_rest(request_type): "source_image_encryption_key": {}, "source_snapshot": "source_snapshot_value", "source_snapshot_encryption_key": {}, + "storage_pool": "storage_pool_value", }, "interface": "interface_value", "kind": "kind_value", @@ -29251,6 +32280,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_update_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_rest_required_fields(request_type=compute.UpdateInstanceRequest): transport_class = transports.InstancesRestTransport @@ -29588,6 +32657,7 @@ def test_update_unary_rest(request_type): "source_image_encryption_key": {}, "source_snapshot": "source_snapshot_value", "source_snapshot_encryption_key": {}, + "storage_pool": "storage_pool_value", }, "interface": "interface_value", "kind": "kind_value", @@ -29841,6 +32911,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_update_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_unary_rest_required_fields(request_type=compute.UpdateInstanceRequest): transport_class = transports.InstancesRestTransport @@ -30276,6 +33386,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_update_access_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_access_config in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_access_config + ] = mock_rpc + + request = {} + client.update_access_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_access_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_access_config_rest_required_fields( request_type=compute.UpdateAccessConfigInstanceRequest, ): @@ -30702,6 +33856,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_update_access_config_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_access_config in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_access_config + ] = mock_rpc + + request = {} + client.update_access_config_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_access_config_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_access_config_unary_rest_required_fields( request_type=compute.UpdateAccessConfigInstanceRequest, ): @@ -31139,6 +34337,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_update_display_device_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_display_device + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_display_device + ] = mock_rpc + + request = {} + client.update_display_device(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_display_device(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_display_device_rest_required_fields( request_type=compute.UpdateDisplayDeviceInstanceRequest, ): @@ -31525,6 +34768,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_update_display_device_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_display_device + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_display_device + ] = mock_rpc + + request = {} + client.update_display_device_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_display_device_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_display_device_unary_rest_required_fields( request_type=compute.UpdateDisplayDeviceInstanceRequest, ): @@ -31970,6 +35258,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_update_network_interface_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_network_interface + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_network_interface + ] = mock_rpc + + request = {} + client.update_network_interface(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_network_interface(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_network_interface_rest_required_fields( request_type=compute.UpdateNetworkInterfaceInstanceRequest, ): @@ -32426,6 +35759,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_update_network_interface_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_network_interface + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_network_interface + ] = mock_rpc + + request = {} + client.update_network_interface_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_network_interface_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_network_interface_unary_rest_required_fields( request_type=compute.UpdateNetworkInterfaceInstanceRequest, ): @@ -32875,6 +36253,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_update_shielded_instance_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_shielded_instance_config + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_shielded_instance_config + ] = mock_rpc + + request = {} + client.update_shielded_instance_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_shielded_instance_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_shielded_instance_config_rest_required_fields( request_type=compute.UpdateShieldedInstanceConfigInstanceRequest, ): @@ -33276,6 +36699,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_update_shielded_instance_config_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_shielded_instance_config + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_shielded_instance_config + ] = mock_rpc + + request = {} + client.update_shielded_instance_config_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_shielded_instance_config_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_shielded_instance_config_unary_rest_required_fields( request_type=compute.UpdateShieldedInstanceConfigInstanceRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instant_snapshots.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instant_snapshots.py index 5285aa19efea..ef4dbc7a6ed8 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instant_snapshots.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instant_snapshots.py @@ -1025,6 +1025,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstantSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListInstantSnapshotsRequest, ): @@ -1432,6 +1468,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstantSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteInstantSnapshotRequest): transport_class = transports.InstantSnapshotsRestTransport @@ -1750,6 +1826,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstantSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteInstantSnapshotRequest, ): @@ -2082,6 +2198,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstantSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetInstantSnapshotRequest): transport_class = transports.InstantSnapshotsRestTransport @@ -2380,6 +2532,42 @@ def test_get_iam_policy_rest(request_type): assert response.version == 774 +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstantSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=compute.GetIamPolicyInstantSnapshotRequest, ): @@ -2808,6 +2996,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstantSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertInstantSnapshotRequest): transport_class = transports.InstantSnapshotsRestTransport @@ -3209,6 +3437,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstantSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertInstantSnapshotRequest, ): @@ -3504,6 +3772,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstantSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListInstantSnapshotsRequest): transport_class = transports.InstantSnapshotsRestTransport @@ -4012,6 +4316,42 @@ def get_message_fields(field): assert response.version == 774 +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstantSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=compute.SetIamPolicyInstantSnapshotRequest, ): @@ -4431,6 +4771,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_labels_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstantSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_rest_required_fields( request_type=compute.SetLabelsInstantSnapshotRequest, ): @@ -4830,6 +5210,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_labels_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstantSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_unary_rest_required_fields( request_type=compute.SetLabelsInstantSnapshotRequest, ): @@ -5208,6 +5628,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstantSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsInstantSnapshotRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_attachments.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_attachments.py index d92902f22982..07d4c6a5f131 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_attachments.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_attachments.py @@ -1055,6 +1055,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListInterconnectAttachmentsRequest, ): @@ -1465,6 +1501,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteInterconnectAttachmentRequest, ): @@ -1787,6 +1863,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteInterconnectAttachmentRequest, ): @@ -2165,6 +2281,42 @@ def test_get_rest(request_type): assert response.vlan_tag8021q == 1160 +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields( request_type=compute.GetInterconnectAttachmentRequest, ): @@ -2635,6 +2787,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertInterconnectAttachmentRequest, ): @@ -3084,6 +3276,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertInterconnectAttachmentRequest, ): @@ -3389,6 +3621,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListInterconnectAttachmentsRequest, ): @@ -3921,6 +4189,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields( request_type=compute.PatchInterconnectAttachmentRequest, ): @@ -4381,6 +4689,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchInterconnectAttachmentRequest, ): @@ -4808,6 +5156,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_labels_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_rest_required_fields( request_type=compute.SetLabelsInterconnectAttachmentRequest, ): @@ -5207,6 +5595,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_labels_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_unary_rest_required_fields( request_type=compute.SetLabelsInterconnectAttachmentRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_locations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_locations.py index 8db02750ec4f..4803521e7145 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_locations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_locations.py @@ -1072,6 +1072,42 @@ def test_get_rest(request_type): assert response.supports_pzs is True +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetInterconnectLocationRequest): transport_class = transports.InterconnectLocationsRestTransport @@ -1357,6 +1393,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListInterconnectLocationsRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_remote_locations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_remote_locations.py index d7056bdfa422..f369a62fa3a6 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_remote_locations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_remote_locations.py @@ -1088,6 +1088,42 @@ def test_get_rest(request_type): assert response.status == "status_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectRemoteLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields( request_type=compute.GetInterconnectRemoteLocationRequest, ): @@ -1383,6 +1419,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectRemoteLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListInterconnectRemoteLocationsRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnects.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnects.py index d0afd9a2e90c..800cf782498c 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnects.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnects.py @@ -1035,6 +1035,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteInterconnectRequest): transport_class = transports.InterconnectsRestTransport @@ -1334,6 +1374,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteInterconnectRequest, ): @@ -1665,6 +1745,42 @@ def test_get_rest(request_type): assert response.state == "state_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetInterconnectRequest): transport_class = transports.InterconnectsRestTransport @@ -1937,6 +2053,42 @@ def test_get_diagnostics_rest(request_type): assert isinstance(response, compute.InterconnectsGetDiagnosticsResponse) +def test_get_diagnostics_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_diagnostics in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_diagnostics] = mock_rpc + + request = {} + client.get_diagnostics(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_diagnostics(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_diagnostics_rest_required_fields( request_type=compute.GetDiagnosticsInterconnectRequest, ): @@ -2218,6 +2370,44 @@ def test_get_macsec_config_rest(request_type): assert response.etag == "etag_value" +def test_get_macsec_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_macsec_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_macsec_config + ] = mock_rpc + + request = {} + client.get_macsec_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_macsec_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_macsec_config_rest_required_fields( request_type=compute.GetMacsecConfigInterconnectRequest, ): @@ -2678,6 +2868,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertInterconnectRequest): transport_class = transports.InterconnectsRestTransport @@ -3109,6 +3339,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertInterconnectRequest, ): @@ -3393,6 +3663,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListInterconnectsRequest): transport_class = transports.InterconnectsRestTransport @@ -3911,6 +4217,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchInterconnectRequest): transport_class = transports.InterconnectsRestTransport @@ -4349,6 +4695,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchInterconnectRequest, ): @@ -4755,6 +5141,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_labels_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_rest_required_fields( request_type=compute.SetLabelsInterconnectRequest, ): @@ -5141,6 +5567,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_labels_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_unary_rest_required_fields( request_type=compute.SetLabelsInterconnectRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_license_codes.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_license_codes.py index 79371a4faf4e..1cb73122aa1e 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_license_codes.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_license_codes.py @@ -986,6 +986,42 @@ def test_get_rest(request_type): assert response.transferable is True +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetLicenseCodeRequest): transport_class = transports.LicenseCodesRestTransport @@ -1339,6 +1375,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsLicenseCodeRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_licenses.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_licenses.py index b4da1bd64c96..2a46dfbae7b0 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_licenses.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_licenses.py @@ -1003,6 +1003,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteLicenseRequest): transport_class = transports.LicensesRestTransport @@ -1316,6 +1356,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields(request_type=compute.DeleteLicenseRequest): transport_class = transports.LicensesRestTransport @@ -1625,6 +1705,42 @@ def test_get_rest(request_type): assert response.transferable is True +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetLicenseRequest): transport_class = transports.LicensesRestTransport @@ -1912,6 +2028,42 @@ def test_get_iam_policy_rest(request_type): assert response.version == 774 +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=compute.GetIamPolicyLicenseRequest, ): @@ -2312,6 +2464,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertLicenseRequest): transport_class = transports.LicensesRestTransport @@ -2682,6 +2874,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields(request_type=compute.InsertLicenseRequest): transport_class = transports.LicensesRestTransport @@ -2957,6 +3189,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListLicensesRequest): transport_class = transports.LicensesRestTransport @@ -3449,6 +3717,42 @@ def get_message_fields(field): assert response.version == 774 +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=compute.SetIamPolicyLicenseRequest, ): @@ -3812,6 +4116,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsLicenseRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_images.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_images.py index 5378866baeea..92023e56fbed 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_images.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_images.py @@ -1035,6 +1035,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteMachineImageRequest): transport_class = transports.MachineImagesRestTransport @@ -1334,6 +1374,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteMachineImageRequest, ): @@ -1639,6 +1719,42 @@ def test_get_rest(request_type): assert response.total_storage_bytes == 2046 +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetMachineImageRequest): transport_class = transports.MachineImagesRestTransport @@ -1918,6 +2034,42 @@ def test_get_iam_policy_rest(request_type): assert response.version == 774 +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=compute.GetIamPolicyMachineImageRequest, ): @@ -2231,6 +2383,7 @@ def test_insert_rest(request_type): "source_image_encryption_key": {}, "source_snapshot": "source_snapshot_value", "source_snapshot_encryption_key": {}, + "storage_pool": "storage_pool_value", }, "interface": "interface_value", "kind": "kind_value", @@ -2540,6 +2693,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertMachineImageRequest): transport_class = transports.MachineImagesRestTransport @@ -2862,6 +3055,7 @@ def test_insert_unary_rest(request_type): "source_image_encryption_key": {}, "source_snapshot": "source_snapshot_value", "source_snapshot_encryption_key": {}, + "storage_pool": "storage_pool_value", }, "interface": "interface_value", "kind": "kind_value", @@ -3149,6 +3343,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertMachineImageRequest, ): @@ -3447,6 +3681,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListMachineImagesRequest): transport_class = transports.MachineImagesRestTransport @@ -3944,6 +4214,42 @@ def get_message_fields(field): assert response.version == 774 +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=compute.SetIamPolicyMachineImageRequest, ): @@ -4309,6 +4615,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MachineImagesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsMachineImageRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_types.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_types.py index 60143e926a92..e8e760ca9f6a 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_types.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_types.py @@ -981,6 +981,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListMachineTypesRequest, ): @@ -1364,6 +1400,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetMachineTypeRequest): transport_class = transports.MachineTypesRestTransport @@ -1656,6 +1728,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListMachineTypesRequest): transport_class = transports.MachineTypesRestTransport diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_attachments.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_attachments.py index 4ffb94a82453..0d2ba52c7ea5 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_attachments.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_attachments.py @@ -1035,6 +1035,42 @@ def test_aggregated_list_rest(request_type): assert response.self_link == "self_link_value" +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListNetworkAttachmentsRequest, ): @@ -1443,6 +1479,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteNetworkAttachmentRequest, ): @@ -1763,6 +1839,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteNetworkAttachmentRequest, ): @@ -2089,6 +2205,42 @@ def test_get_rest(request_type): assert response.subnetworks == ["subnetworks_value"] +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetNetworkAttachmentRequest): transport_class = transports.NetworkAttachmentsRestTransport @@ -2387,6 +2539,42 @@ def test_get_iam_policy_rest(request_type): assert response.version == 774 +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=compute.GetIamPolicyNetworkAttachmentRequest, ): @@ -2830,6 +3018,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertNetworkAttachmentRequest, ): @@ -3256,6 +3484,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertNetworkAttachmentRequest, ): @@ -3559,6 +3827,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListNetworkAttachmentsRequest): transport_class = transports.NetworkAttachmentsRestTransport @@ -4066,6 +4370,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchNetworkAttachmentRequest): transport_class = transports.NetworkAttachmentsRestTransport @@ -4509,6 +4853,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchNetworkAttachmentRequest, ): @@ -4977,6 +5361,42 @@ def get_message_fields(field): assert response.version == 774 +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=compute.SetIamPolicyNetworkAttachmentRequest, ): @@ -5353,6 +5773,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsNetworkAttachmentRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_edge_security_services.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_edge_security_services.py index 40822d6d3d99..5f667a921eaa 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_edge_security_services.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_edge_security_services.py @@ -1071,6 +1071,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListNetworkEdgeSecurityServicesRequest, ): @@ -1484,6 +1520,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteNetworkEdgeSecurityServiceRequest, ): @@ -1810,6 +1886,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteNetworkEdgeSecurityServiceRequest, ): @@ -2134,6 +2250,42 @@ def test_get_rest(request_type): assert response.self_link_with_id == "self_link_with_id_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields( request_type=compute.GetNetworkEdgeSecurityServiceRequest, ): @@ -2567,6 +2719,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertNetworkEdgeSecurityServiceRequest, ): @@ -2977,6 +3169,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertNetworkEdgeSecurityServiceRequest, ): @@ -3413,6 +3645,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields( request_type=compute.PatchNetworkEdgeSecurityServiceRequest, ): @@ -3848,6 +4120,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkEdgeSecurityServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchNetworkEdgeSecurityServiceRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py index a1ecb0275b8e..3c3535fdf6d0 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py @@ -1053,6 +1053,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListNetworkEndpointGroupsRequest, ): @@ -1554,6 +1590,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_attach_network_endpoints_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.attach_network_endpoints + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.attach_network_endpoints + ] = mock_rpc + + request = {} + client.attach_network_endpoints(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.attach_network_endpoints(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_attach_network_endpoints_rest_required_fields( request_type=compute.AttachNetworkEndpointsNetworkEndpointGroupRequest, ): @@ -1980,6 +2061,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_attach_network_endpoints_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.attach_network_endpoints + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.attach_network_endpoints + ] = mock_rpc + + request = {} + client.attach_network_endpoints_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.attach_network_endpoints_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_attach_network_endpoints_unary_rest_required_fields( request_type=compute.AttachNetworkEndpointsNetworkEndpointGroupRequest, ): @@ -2335,6 +2461,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteNetworkEndpointGroupRequest, ): @@ -2655,6 +2821,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteNetworkEndpointGroupRequest, ): @@ -3090,6 +3296,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_detach_network_endpoints_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.detach_network_endpoints + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.detach_network_endpoints + ] = mock_rpc + + request = {} + client.detach_network_endpoints(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.detach_network_endpoints(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_detach_network_endpoints_rest_required_fields( request_type=compute.DetachNetworkEndpointsNetworkEndpointGroupRequest, ): @@ -3516,6 +3767,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_detach_network_endpoints_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.detach_network_endpoints + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.detach_network_endpoints + ] = mock_rpc + + request = {} + client.detach_network_endpoints_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.detach_network_endpoints_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_detach_network_endpoints_unary_rest_required_fields( request_type=compute.DetachNetworkEndpointsNetworkEndpointGroupRequest, ): @@ -3855,6 +4151,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetNetworkEndpointGroupRequest): transport_class = transports.NetworkEndpointGroupsRestTransport @@ -4299,6 +4631,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertNetworkEndpointGroupRequest, ): @@ -4716,6 +5088,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertNetworkEndpointGroupRequest, ): @@ -5011,6 +5423,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListNetworkEndpointGroupsRequest, ): @@ -5460,6 +5908,47 @@ def get_message_fields(field): assert response.next_page_token == "next_page_token_value" +def test_list_network_endpoints_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_network_endpoints + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_network_endpoints + ] = mock_rpc + + request = {} + client.list_network_endpoints(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_network_endpoints(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_network_endpoints_rest_required_fields( request_type=compute.ListNetworkEndpointsNetworkEndpointGroupsRequest, ): @@ -5936,6 +6425,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsNetworkEndpointGroupRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_firewall_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_firewall_policies.py index 722f0931f805..143f66825ea5 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_firewall_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_firewall_policies.py @@ -1173,6 +1173,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_add_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_association in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_association] = mock_rpc + + request = {} + client.add_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_association_rest_required_fields( request_type=compute.AddAssociationNetworkFirewallPolicyRequest, ): @@ -1577,6 +1617,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_add_association_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_association in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_association] = mock_rpc + + request = {} + client.add_association_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_association_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_association_unary_rest_required_fields( request_type=compute.AddAssociationNetworkFirewallPolicyRequest, ): @@ -2047,6 +2127,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_add_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_rule] = mock_rpc + + request = {} + client.add_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_rule_rest_required_fields( request_type=compute.AddRuleNetworkFirewallPolicyRequest, ): @@ -2496,6 +2616,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_add_rule_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_rule] = mock_rpc + + request = {} + client.add_rule_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_rule_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_rule_unary_rest_required_fields( request_type=compute.AddRuleNetworkFirewallPolicyRequest, ): @@ -2839,6 +2999,46 @@ def test_clone_rules_rest(request_type): assert response.zone == "zone_value" +def test_clone_rules_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.clone_rules in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.clone_rules] = mock_rpc + + request = {} + client.clone_rules(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.clone_rules(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_clone_rules_rest_required_fields( request_type=compute.CloneRulesNetworkFirewallPolicyRequest, ): @@ -3150,6 +3350,46 @@ def test_clone_rules_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_clone_rules_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.clone_rules in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.clone_rules] = mock_rpc + + request = {} + client.clone_rules_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.clone_rules_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_clone_rules_unary_rest_required_fields( request_type=compute.CloneRulesNetworkFirewallPolicyRequest, ): @@ -3483,6 +3723,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteNetworkFirewallPolicyRequest, ): @@ -3784,6 +4064,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteNetworkFirewallPolicyRequest, ): @@ -4089,6 +4409,42 @@ def test_get_rest(request_type): assert response.short_name == "short_name_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetNetworkFirewallPolicyRequest): transport_class = transports.NetworkFirewallPoliciesRestTransport @@ -4376,6 +4732,42 @@ def test_get_association_rest(request_type): assert response.short_name == "short_name_value" +def test_get_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_association in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_association] = mock_rpc + + request = {} + client.get_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_association_rest_required_fields( request_type=compute.GetAssociationNetworkFirewallPolicyRequest, ): @@ -4664,6 +5056,42 @@ def test_get_iam_policy_rest(request_type): assert response.version == 774 +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=compute.GetIamPolicyNetworkFirewallPolicyRequest, ): @@ -4970,6 +5398,42 @@ def test_get_rule_rest(request_type): assert response.tls_inspect is True +def test_get_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_rule] = mock_rpc + + request = {} + client.get_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rule_rest_required_fields( request_type=compute.GetRuleNetworkFirewallPolicyRequest, ): @@ -5454,6 +5918,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertNetworkFirewallPolicyRequest, ): @@ -5923,6 +6427,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertNetworkFirewallPolicyRequest, ): @@ -6217,6 +6761,42 @@ def test_list_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListNetworkFirewallPoliciesRequest, ): @@ -6761,6 +7341,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields( request_type=compute.PatchNetworkFirewallPolicyRequest, ): @@ -7237,6 +7857,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchNetworkFirewallPolicyRequest, ): @@ -7704,6 +8364,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch_rule] = mock_rpc + + request = {} + client.patch_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rule_rest_required_fields( request_type=compute.PatchRuleNetworkFirewallPolicyRequest, ): @@ -8151,6 +8851,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_rule_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch_rule] = mock_rpc + + request = {} + client.patch_rule_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_rule_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rule_unary_rest_required_fields( request_type=compute.PatchRuleNetworkFirewallPolicyRequest, ): @@ -8492,6 +9232,50 @@ def test_remove_association_rest(request_type): assert response.zone == "zone_value" +def test_remove_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.remove_association in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.remove_association + ] = mock_rpc + + request = {} + client.remove_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_association_rest_required_fields( request_type=compute.RemoveAssociationNetworkFirewallPolicyRequest, ): @@ -8804,6 +9588,50 @@ def test_remove_association_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_remove_association_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.remove_association in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.remove_association + ] = mock_rpc + + request = {} + client.remove_association_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_association_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_association_unary_rest_required_fields( request_type=compute.RemoveAssociationNetworkFirewallPolicyRequest, ): @@ -9138,6 +9966,46 @@ def test_remove_rule_rest(request_type): assert response.zone == "zone_value" +def test_remove_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.remove_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.remove_rule] = mock_rpc + + request = {} + client.remove_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_rule_rest_required_fields( request_type=compute.RemoveRuleNetworkFirewallPolicyRequest, ): @@ -9449,6 +10317,46 @@ def test_remove_rule_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_remove_rule_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.remove_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.remove_rule] = mock_rpc + + request = {} + client.remove_rule_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_rule_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_rule_unary_rest_required_fields( request_type=compute.RemoveRuleNetworkFirewallPolicyRequest, ): @@ -9896,6 +10804,42 @@ def get_message_fields(field): assert response.version == 774 +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=compute.SetIamPolicyNetworkFirewallPolicyRequest, ): @@ -10262,6 +11206,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsNetworkFirewallPolicyRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_networks.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_networks.py index 91bf3850cbc7..e1af496be678 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_networks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_networks.py @@ -1099,6 +1099,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_add_peering_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_peering in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_peering] = mock_rpc + + request = {} + client.add_peering(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_peering(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_peering_rest_required_fields( request_type=compute.AddPeeringNetworkRequest, ): @@ -1502,6 +1542,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_add_peering_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_peering in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_peering] = mock_rpc + + request = {} + client.add_peering_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_peering_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_peering_unary_rest_required_fields( request_type=compute.AddPeeringNetworkRequest, ): @@ -1831,6 +1911,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteNetworkRequest): transport_class = transports.NetworksRestTransport @@ -2126,6 +2246,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields(request_type=compute.DeleteNetworkRequest): transport_class = transports.NetworksRestTransport @@ -2434,6 +2594,42 @@ def test_get_rest(request_type): assert response.subnetworks == ["subnetworks_value"] +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetNetworkRequest): transport_class = transports.NetworksRestTransport @@ -2704,6 +2900,47 @@ def test_get_effective_firewalls_rest(request_type): assert isinstance(response, compute.NetworksGetEffectiveFirewallsResponse) +def test_get_effective_firewalls_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_effective_firewalls + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_effective_firewalls + ] = mock_rpc + + request = {} + client.get_effective_firewalls(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_effective_firewalls(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_effective_firewalls_rest_required_fields( request_type=compute.GetEffectiveFirewallsNetworkRequest, ): @@ -3131,6 +3368,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertNetworkRequest): transport_class = transports.NetworksRestTransport @@ -3524,6 +3801,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields(request_type=compute.InsertNetworkRequest): transport_class = transports.NetworksRestTransport @@ -3801,6 +4118,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListNetworksRequest): transport_class = transports.NetworksRestTransport @@ -4141,6 +4494,46 @@ def test_list_peering_routes_rest(request_type): assert response.self_link == "self_link_value" +def test_list_peering_routes_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_peering_routes in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_peering_routes + ] = mock_rpc + + request = {} + client.list_peering_routes(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_peering_routes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_peering_routes_rest_required_fields( request_type=compute.ListPeeringRoutesNetworksRequest, ): @@ -4645,6 +5038,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchNetworkRequest): transport_class = transports.NetworksRestTransport @@ -5046,6 +5479,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields(request_type=compute.PatchNetworkRequest): transport_class = transports.NetworksRestTransport @@ -5448,6 +5921,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_remove_peering_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.remove_peering in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.remove_peering] = mock_rpc + + request = {} + client.remove_peering(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_peering(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_peering_rest_required_fields( request_type=compute.RemovePeeringNetworkRequest, ): @@ -5836,6 +6349,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_remove_peering_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.remove_peering in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.remove_peering] = mock_rpc + + request = {} + client.remove_peering_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_peering_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_peering_unary_rest_required_fields( request_type=compute.RemovePeeringNetworkRequest, ): @@ -6165,6 +6718,51 @@ def test_switch_to_custom_mode_rest(request_type): assert response.zone == "zone_value" +def test_switch_to_custom_mode_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.switch_to_custom_mode + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.switch_to_custom_mode + ] = mock_rpc + + request = {} + client.switch_to_custom_mode(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.switch_to_custom_mode(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_switch_to_custom_mode_rest_required_fields( request_type=compute.SwitchToCustomModeNetworkRequest, ): @@ -6464,6 +7062,51 @@ def test_switch_to_custom_mode_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_switch_to_custom_mode_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.switch_to_custom_mode + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.switch_to_custom_mode + ] = mock_rpc + + request = {} + client.switch_to_custom_mode_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.switch_to_custom_mode_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_switch_to_custom_mode_unary_rest_required_fields( request_type=compute.SwitchToCustomModeNetworkRequest, ): @@ -6881,6 +7524,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_update_peering_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_peering in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_peering] = mock_rpc + + request = {} + client.update_peering(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_peering(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_peering_rest_required_fields( request_type=compute.UpdatePeeringNetworkRequest, ): @@ -7284,6 +7967,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_update_peering_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_peering in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_peering] = mock_rpc + + request = {} + client.update_peering_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_peering_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_peering_unary_rest_required_fields( request_type=compute.UpdatePeeringNetworkRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_groups.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_groups.py index f5bf1e8735b1..3ece693280be 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_groups.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_groups.py @@ -1095,6 +1095,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_add_nodes_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_nodes in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_nodes] = mock_rpc + + request = {} + client.add_nodes(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_nodes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_nodes_rest_required_fields(request_type=compute.AddNodesNodeGroupRequest): transport_class = transports.NodeGroupsRestTransport @@ -1494,6 +1534,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_add_nodes_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_nodes in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_nodes] = mock_rpc + + request = {} + client.add_nodes_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_nodes_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_nodes_unary_rest_required_fields( request_type=compute.AddNodesNodeGroupRequest, ): @@ -1802,6 +1882,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListNodeGroupsRequest, ): @@ -2200,6 +2316,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteNodeGroupRequest): transport_class = transports.NodeGroupsRestTransport @@ -2508,6 +2664,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields(request_type=compute.DeleteNodeGroupRequest): transport_class = transports.NodeGroupsRestTransport @@ -2921,6 +3117,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_delete_nodes_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_nodes in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_nodes] = mock_rpc + + request = {} + client.delete_nodes(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_nodes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_nodes_rest_required_fields( request_type=compute.DeleteNodesNodeGroupRequest, ): @@ -3324,6 +3560,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_delete_nodes_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_nodes in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_nodes] = mock_rpc + + request = {} + client.delete_nodes_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_nodes_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_nodes_unary_rest_required_fields( request_type=compute.DeleteNodesNodeGroupRequest, ): @@ -3611,6 +3887,7 @@ def test_get_rest(request_type): id=205, kind="kind_value", location_hint="location_hint_value", + maintenance_interval="maintenance_interval_value", maintenance_policy="maintenance_policy_value", name="name_value", node_template="node_template_value", @@ -3639,6 +3916,7 @@ def test_get_rest(request_type): assert response.id == 205 assert response.kind == "kind_value" assert response.location_hint == "location_hint_value" + assert response.maintenance_interval == "maintenance_interval_value" assert response.maintenance_policy == "maintenance_policy_value" assert response.name == "name_value" assert response.node_template == "node_template_value" @@ -3648,6 +3926,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetNodeGroupRequest): transport_class = transports.NodeGroupsRestTransport @@ -3938,6 +4252,42 @@ def test_get_iam_policy_rest(request_type): assert response.version == 774 +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=compute.GetIamPolicyNodeGroupRequest, ): @@ -4217,6 +4567,7 @@ def test_insert_rest(request_type): "id": 205, "kind": "kind_value", "location_hint": "location_hint_value", + "maintenance_interval": "maintenance_interval_value", "maintenance_policy": "maintenance_policy_value", "maintenance_window": { "maintenance_duration": {"nanos": 543, "seconds": 751}, @@ -4364,6 +4715,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertNodeGroupRequest): transport_class = transports.NodeGroupsRestTransport @@ -4661,6 +5052,7 @@ def test_insert_unary_rest(request_type): "id": 205, "kind": "kind_value", "location_hint": "location_hint_value", + "maintenance_interval": "maintenance_interval_value", "maintenance_policy": "maintenance_policy_value", "maintenance_window": { "maintenance_duration": {"nanos": 543, "seconds": 751}, @@ -4786,6 +5178,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields(request_type=compute.InsertNodeGroupRequest): transport_class = transports.NodeGroupsRestTransport @@ -5102,6 +5534,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListNodeGroupsRequest): transport_class = transports.NodeGroupsRestTransport @@ -5458,6 +5926,42 @@ def test_list_nodes_rest(request_type): assert response.self_link == "self_link_value" +def test_list_nodes_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_nodes in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_nodes] = mock_rpc + + request = {} + client.list_nodes(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_nodes(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_nodes_rest_required_fields( request_type=compute.ListNodesNodeGroupsRequest, ): @@ -5814,6 +6318,7 @@ def test_patch_rest(request_type): "id": 205, "kind": "kind_value", "location_hint": "location_hint_value", + "maintenance_interval": "maintenance_interval_value", "maintenance_policy": "maintenance_policy_value", "maintenance_window": { "maintenance_duration": {"nanos": 543, "seconds": 751}, @@ -5961,6 +6466,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchNodeGroupRequest): transport_class = transports.NodeGroupsRestTransport @@ -6244,6 +6789,7 @@ def test_patch_unary_rest(request_type): "id": 205, "kind": "kind_value", "location_hint": "location_hint_value", + "maintenance_interval": "maintenance_interval_value", "maintenance_policy": "maintenance_policy_value", "maintenance_window": { "maintenance_duration": {"nanos": 543, "seconds": 751}, @@ -6369,6 +6915,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields(request_type=compute.PatchNodeGroupRequest): transport_class = transports.NodeGroupsRestTransport @@ -6628,36 +7214,958 @@ def test_patch_unary_rest_error(): @pytest.mark.parametrize( "request_type", [ - compute.SetIamPolicyNodeGroupRequest, + compute.PerformMaintenanceNodeGroupRequest, dict, ], ) -def test_set_iam_policy_rest(request_type): +def test_perform_maintenance_rest(request_type): client = NodeGroupsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} - request_init["zone_set_policy_request_resource"] = { - "bindings": [ - { - "binding_id": "binding_id_value", - "condition": { - "description": "description_value", - "expression": "expression_value", - "location": "location_value", - "title": "title_value", - }, - "members": ["members_value1", "members_value2"], - "role": "role_value", - } - ], - "etag": "etag_value", - "policy": { - "audit_configs": [ - { + request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} + request_init["node_groups_perform_maintenance_request_resource"] = { + "nodes": ["nodes_value1", "nodes_value2"], + "start_time": "start_time_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PerformMaintenanceNodeGroupRequest.meta.fields[ + "node_groups_perform_maintenance_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "node_groups_perform_maintenance_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + 
# Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + "node_groups_perform_maintenance_request_resource" + ][field] + ), + ): + del request_init[ + "node_groups_perform_maintenance_request_resource" + ][field][i][subfield] + else: + del request_init["node_groups_perform_maintenance_request_resource"][ + field + ][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.perform_maintenance(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +def test_perform_maintenance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.perform_maintenance in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.perform_maintenance + ] = mock_rpc + + request = {} + client.perform_maintenance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.perform_maintenance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_perform_maintenance_rest_required_fields( + request_type=compute.PerformMaintenanceNodeGroupRequest, +): + transport_class = transports.NodeGroupsRestTransport + + request_init = {} + request_init["node_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).perform_maintenance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["nodeGroup"] = "node_group_value" + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).perform_maintenance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "nodeGroup" in jsonified_request + assert jsonified_request["nodeGroup"] == "node_group_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.perform_maintenance(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_perform_maintenance_rest_unset_required_fields(): + transport = transports.NodeGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.perform_maintenance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "nodeGroup", + "nodeGroupsPerformMaintenanceRequestResource", + "project", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_perform_maintenance_rest_interceptors(null_interceptor): + transport = transports.NodeGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NodeGroupsRestInterceptor(), + ) + client = NodeGroupsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NodeGroupsRestInterceptor, "post_perform_maintenance" + ) as post, mock.patch.object( + transports.NodeGroupsRestInterceptor, "pre_perform_maintenance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PerformMaintenanceNodeGroupRequest.pb( + 
compute.PerformMaintenanceNodeGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PerformMaintenanceNodeGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.perform_maintenance( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_perform_maintenance_rest_bad_request( + transport: str = "rest", request_type=compute.PerformMaintenanceNodeGroupRequest +): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.perform_maintenance(request) + + +def test_perform_maintenance_rest_flattened(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "node_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + node_group="node_group_value", + node_groups_perform_maintenance_request_resource=compute.NodeGroupsPerformMaintenanceRequest( + nodes=["nodes_value"] + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.perform_maintenance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/performMaintenance" + % client.transport._host, + args[1], + ) + + +def test_perform_maintenance_rest_flattened_error(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.perform_maintenance( + compute.PerformMaintenanceNodeGroupRequest(), + project="project_value", + zone="zone_value", + node_group="node_group_value", + node_groups_perform_maintenance_request_resource=compute.NodeGroupsPerformMaintenanceRequest( + nodes=["nodes_value"] + ), + ) + + +def test_perform_maintenance_rest_error(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.PerformMaintenanceNodeGroupRequest, + dict, + ], +) +def test_perform_maintenance_unary_rest(request_type): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} + request_init["node_groups_perform_maintenance_request_resource"] = { + "nodes": ["nodes_value1", "nodes_value2"], + "start_time": "start_time_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PerformMaintenanceNodeGroupRequest.meta.fields[ + "node_groups_perform_maintenance_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "node_groups_perform_maintenance_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init[ + 
"node_groups_perform_maintenance_request_resource" + ][field] + ), + ): + del request_init[ + "node_groups_perform_maintenance_request_resource" + ][field][i][subfield] + else: + del request_init["node_groups_perform_maintenance_request_resource"][ + field + ][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.perform_maintenance_unary(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Operation) + + +def test_perform_maintenance_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.perform_maintenance in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.perform_maintenance + ] = mock_rpc + + request = {} + client.perform_maintenance_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.perform_maintenance_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_perform_maintenance_unary_rest_required_fields( + request_type=compute.PerformMaintenanceNodeGroupRequest, +): + transport_class = transports.NodeGroupsRestTransport + + request_init = {} + request_init["node_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).perform_maintenance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["nodeGroup"] = "node_group_value" + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).perform_maintenance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "nodeGroup" in jsonified_request + assert jsonified_request["nodeGroup"] == "node_group_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.perform_maintenance_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_perform_maintenance_unary_rest_unset_required_fields(): + transport = transports.NodeGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.perform_maintenance._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "nodeGroup", + "nodeGroupsPerformMaintenanceRequestResource", + "project", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_perform_maintenance_unary_rest_interceptors(null_interceptor): + transport = transports.NodeGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.NodeGroupsRestInterceptor(), + ) + client = NodeGroupsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.NodeGroupsRestInterceptor, "post_perform_maintenance" + ) as post, mock.patch.object( + transports.NodeGroupsRestInterceptor, "pre_perform_maintenance" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.PerformMaintenanceNodeGroupRequest.pb( + 
compute.PerformMaintenanceNodeGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.PerformMaintenanceNodeGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.perform_maintenance_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_perform_maintenance_unary_rest_bad_request( + transport: str = "rest", request_type=compute.PerformMaintenanceNodeGroupRequest +): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.perform_maintenance_unary(request) + + +def test_perform_maintenance_unary_rest_flattened(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "node_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + node_group="node_group_value", + node_groups_perform_maintenance_request_resource=compute.NodeGroupsPerformMaintenanceRequest( + nodes=["nodes_value"] + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.perform_maintenance_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/performMaintenance" + % client.transport._host, + args[1], + ) + + +def test_perform_maintenance_unary_rest_flattened_error(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.perform_maintenance_unary( + compute.PerformMaintenanceNodeGroupRequest(), + project="project_value", + zone="zone_value", + node_group="node_group_value", + node_groups_perform_maintenance_request_resource=compute.NodeGroupsPerformMaintenanceRequest( + nodes=["nodes_value"] + ), + ) + + +def test_perform_maintenance_unary_rest_error(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.SetIamPolicyNodeGroupRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["zone_set_policy_request_resource"] = { + "bindings": [ + { + "binding_id": "binding_id_value", + "condition": { + "description": "description_value", + "expression": "expression_value", + "location": "location_value", + "title": "title_value", + }, + "members": ["members_value1", "members_value2"], + "role": "role_value", + } + ], + "etag": "etag_value", + "policy": { + "audit_configs": [ + { "audit_log_configs": [ { "exempted_members": [ @@ -6821,6 +8329,42 @@ def get_message_fields(field): assert response.version == 774 +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in 
client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=compute.SetIamPolicyNodeGroupRequest, ): @@ -7246,6 +8790,48 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_node_template_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_node_template in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_node_template + ] = mock_rpc + + request = {} + client.set_node_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_node_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_node_template_rest_required_fields( request_type=compute.SetNodeTemplateNodeGroupRequest, ): @@ -7651,6 +9237,48 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_node_template_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_node_template in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_node_template + ] = mock_rpc + + request = {} + client.set_node_template_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_node_template_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_node_template_unary_rest_required_fields( request_type=compute.SetNodeTemplateNodeGroupRequest, ): @@ -8078,6 +9706,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_simulate_maintenance_event_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.simulate_maintenance_event + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.simulate_maintenance_event + ] = mock_rpc + + request = {} + client.simulate_maintenance_event(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.simulate_maintenance_event(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_simulate_maintenance_event_rest_required_fields( request_type=compute.SimulateMaintenanceEventNodeGroupRequest, ): @@ -8484,6 +10157,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_simulate_maintenance_event_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.simulate_maintenance_event + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.simulate_maintenance_event + ] = mock_rpc + + request = {} + client.simulate_maintenance_event_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.simulate_maintenance_event_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_simulate_maintenance_event_unary_rest_required_fields( request_type=compute.SimulateMaintenanceEventNodeGroupRequest, ): @@ -8863,6 +10581,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsNodeGroupRequest, ): @@ -9243,6 +11001,7 @@ def test_node_groups_base_transport(): "list", "list_nodes", "patch", + "perform_maintenance", "set_iam_policy", "set_node_template", "simulate_maintenance_event", @@ -9414,6 +11173,9 @@ def test_node_groups_client_transport_session_collision(transport_name): session1 = client1.transport.patch._session session2 = client2.transport.patch._session assert session1 != session2 + session1 = client1.transport.perform_maintenance._session + session2 = client2.transport.perform_maintenance._session + assert session1 != session2 session1 = client1.transport.set_iam_policy._session session2 = client2.transport.set_iam_policy._session assert session1 != session2 diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_templates.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_templates.py index 05d5c14bf7c7..985bcb30933f 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_templates.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_templates.py @@ -1001,6 +1001,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() 
+ + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListNodeTemplatesRequest, ): @@ -1408,6 +1444,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteNodeTemplateRequest): transport_class = transports.NodeTemplatesRestTransport @@ -1726,6 +1802,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteNodeTemplateRequest, ): @@ -2046,6 +2162,42 @@ def test_get_rest(request_type): assert response.status_message == "status_message_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetNodeTemplateRequest): transport_class = transports.NodeTemplatesRestTransport @@ -2340,6 +2492,42 @@ def test_get_iam_policy_rest(request_type): assert response.version == 774 +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=compute.GetIamPolicyNodeTemplateRequest, ): @@ -2769,6 +2957,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertNodeTemplateRequest): transport_class = transports.NodeTemplatesRestTransport @@ -3171,6 +3399,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertNodeTemplateRequest, ): @@ -3466,6 +3734,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListNodeTemplatesRequest): transport_class = transports.NodeTemplatesRestTransport @@ -3974,6 +4278,42 @@ def get_message_fields(field): assert response.version == 774 +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=compute.SetIamPolicyNodeTemplateRequest, ): @@ -4350,6 +4690,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsNodeTemplateRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_types.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_types.py index 6907f00d904b..815832ae0e8e 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_types.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_node_types.py @@ -971,6 +971,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListNodeTypesRequest, ): @@ -1345,6 +1381,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetNodeTypeRequest): transport_class = transports.NodeTypesRestTransport @@ -1635,6 +1707,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = NodeTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListNodeTypesRequest): transport_class = transports.NodeTypesRestTransport diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_packet_mirrorings.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_packet_mirrorings.py index 76aefe8c9f9a..9336672565f6 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_packet_mirrorings.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_packet_mirrorings.py @@ -1025,6 +1025,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListPacketMirroringsRequest, ): @@ -1432,6 +1468,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeletePacketMirroringRequest): transport_class = transports.PacketMirroringsRestTransport @@ -1750,6 +1826,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeletePacketMirroringRequest, ): @@ -2066,6 +2182,42 @@ def test_get_rest(request_type): assert response.self_link == "self_link_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetPacketMirroringRequest): transport_class = transports.PacketMirroringsRestTransport @@ -2500,6 +2652,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertPacketMirroringRequest): transport_class = transports.PacketMirroringsRestTransport @@ -2909,6 +3101,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertPacketMirroringRequest, ): @@ -3208,6 +3440,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListPacketMirroringsRequest): transport_class = transports.PacketMirroringsRestTransport @@ -3704,6 +3972,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchPacketMirroringRequest): transport_class = transports.PacketMirroringsRestTransport @@ -4132,6 +4440,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchPacketMirroringRequest, ): @@ -4518,6 +4866,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsPacketMirroringRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_projects.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_projects.py index b4c91bd315f0..6e631ea3849b 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_projects.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_projects.py @@ -1003,6 +1003,48 @@ def test_disable_xpn_host_rest(request_type): assert response.zone == "zone_value" +def test_disable_xpn_host_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.disable_xpn_host in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.disable_xpn_host + ] = mock_rpc + + request = {} + client.disable_xpn_host(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.disable_xpn_host(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_disable_xpn_host_rest_required_fields( request_type=compute.DisableXpnHostProjectRequest, ): @@ -1287,6 +1329,48 @@ def test_disable_xpn_host_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_disable_xpn_host_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.disable_xpn_host in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.disable_xpn_host + ] = mock_rpc + + request = {} + client.disable_xpn_host_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.disable_xpn_host_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_disable_xpn_host_unary_rest_required_fields( request_type=compute.DisableXpnHostProjectRequest, ): @@ -1678,6 +1762,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_disable_xpn_resource_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.disable_xpn_resource in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.disable_xpn_resource + ] = mock_rpc + + request = {} + client.disable_xpn_resource(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.disable_xpn_resource(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_disable_xpn_resource_rest_required_fields( request_type=compute.DisableXpnResourceProjectRequest, ): @@ -2063,6 +2191,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_disable_xpn_resource_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.disable_xpn_resource in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.disable_xpn_resource + ] = mock_rpc + + request = {} + client.disable_xpn_resource_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.disable_xpn_resource_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_disable_xpn_resource_unary_rest_required_fields( request_type=compute.DisableXpnResourceProjectRequest, ): @@ -2385,6 +2557,46 @@ def test_enable_xpn_host_rest(request_type): assert response.zone == "zone_value" +def test_enable_xpn_host_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.enable_xpn_host in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.enable_xpn_host] = mock_rpc + + request = {} + client.enable_xpn_host(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.enable_xpn_host(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_enable_xpn_host_rest_required_fields( request_type=compute.EnableXpnHostProjectRequest, ): @@ -2669,6 +2881,46 @@ def test_enable_xpn_host_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_enable_xpn_host_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.enable_xpn_host in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.enable_xpn_host] = mock_rpc + + request = {} + client.enable_xpn_host_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.enable_xpn_host_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_enable_xpn_host_unary_rest_required_fields( request_type=compute.EnableXpnHostProjectRequest, ): @@ -3060,6 +3312,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_enable_xpn_resource_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.enable_xpn_resource in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.enable_xpn_resource + ] = mock_rpc + + request = {} + client.enable_xpn_resource(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.enable_xpn_resource(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_enable_xpn_resource_rest_required_fields( request_type=compute.EnableXpnResourceProjectRequest, ): @@ -3445,6 +3741,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_enable_xpn_resource_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.enable_xpn_resource in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.enable_xpn_resource + ] = mock_rpc + + request = {} + client.enable_xpn_resource_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.enable_xpn_resource_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_enable_xpn_resource_unary_rest_required_fields( request_type=compute.EnableXpnResourceProjectRequest, ): @@ -3747,6 +4087,42 @@ def test_get_rest(request_type): assert response.xpn_project_status == "xpn_project_status_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetProjectRequest): transport_class = transports.ProjectsRestTransport @@ -4026,6 +4402,42 @@ def test_get_xpn_host_rest(request_type): assert response.xpn_project_status == "xpn_project_status_value" +def test_get_xpn_host_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_xpn_host in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_xpn_host] = mock_rpc + + request = {} + client.get_xpn_host(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_xpn_host(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_xpn_host_rest_required_fields( request_type=compute.GetXpnHostProjectRequest, ): @@ -4290,6 +4702,44 @@ def test_get_xpn_resources_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_get_xpn_resources_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_xpn_resources in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_xpn_resources + ] = mock_rpc + + request = {} + client.get_xpn_resources(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_xpn_resources(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_xpn_resources_rest_required_fields( request_type=compute.GetXpnResourcesProjectsRequest, ): @@ -4719,6 +5169,42 @@ def get_message_fields(field): assert response.self_link == "self_link_value" +def test_list_xpn_hosts_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_xpn_hosts in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_xpn_hosts] = mock_rpc + + request = {} + client.list_xpn_hosts(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_xpn_hosts(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_xpn_hosts_rest_required_fields( request_type=compute.ListXpnHostsProjectsRequest, ): @@ -5191,6 +5677,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_move_disk_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.move_disk in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.move_disk] = mock_rpc + + request = {} + client.move_disk(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.move_disk(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_move_disk_rest_required_fields(request_type=compute.MoveDiskProjectRequest): transport_class = transports.ProjectsRestTransport @@ -5563,6 +6089,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_move_disk_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.move_disk in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.move_disk] = mock_rpc + + request = {} + client.move_disk_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.move_disk_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_move_disk_unary_rest_required_fields( request_type=compute.MoveDiskProjectRequest, ): @@ -5961,6 +6527,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_move_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.move_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.move_instance] = mock_rpc + + request = {} + client.move_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.move_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_move_instance_rest_required_fields( request_type=compute.MoveInstanceProjectRequest, ): @@ -6339,6 +6945,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_move_instance_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.move_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.move_instance] = mock_rpc + + request = {} + client.move_instance_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.move_instance_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_move_instance_unary_rest_required_fields( request_type=compute.MoveInstanceProjectRequest, ): @@ -6745,6 +7391,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_cloud_armor_tier_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_cloud_armor_tier in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_cloud_armor_tier + ] = mock_rpc + + request = {} + client.set_cloud_armor_tier(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_cloud_armor_tier(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_cloud_armor_tier_rest_required_fields( request_type=compute.SetCloudArmorTierProjectRequest, ): @@ -7130,6 +7820,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_cloud_armor_tier_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_cloud_armor_tier in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_cloud_armor_tier + ] = mock_rpc + + request = {} + client.set_cloud_armor_tier_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_cloud_armor_tier_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_cloud_armor_tier_unary_rest_required_fields( request_type=compute.SetCloudArmorTierProjectRequest, ): @@ -7526,6 +8260,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_common_instance_metadata_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_common_instance_metadata + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_common_instance_metadata + ] = mock_rpc + + request = {} + client.set_common_instance_metadata(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_common_instance_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_common_instance_metadata_rest_required_fields( request_type=compute.SetCommonInstanceMetadataProjectRequest, ): @@ -7897,6 +8676,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_common_instance_metadata_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_common_instance_metadata + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_common_instance_metadata + ] = mock_rpc + + request = {} + client.set_common_instance_metadata_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_common_instance_metadata_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_common_instance_metadata_unary_rest_required_fields( request_type=compute.SetCommonInstanceMetadataProjectRequest, ): @@ -8303,6 +9127,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_default_network_tier_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_default_network_tier + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_default_network_tier + ] = mock_rpc + + request = {} + client.set_default_network_tier(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_default_network_tier(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_default_network_tier_rest_required_fields( request_type=compute.SetDefaultNetworkTierProjectRequest, ): @@ -8688,6 +9557,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_default_network_tier_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_default_network_tier + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_default_network_tier + ] = mock_rpc + + request = {} + client.set_default_network_tier_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_default_network_tier_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_default_network_tier_unary_rest_required_fields( request_type=compute.SetDefaultNetworkTierProjectRequest, ): @@ -9089,6 +10003,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_usage_export_bucket_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_usage_export_bucket + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_usage_export_bucket + ] = mock_rpc + + request = {} + client.set_usage_export_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_usage_export_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_usage_export_bucket_rest_required_fields( request_type=compute.SetUsageExportBucketProjectRequest, ): @@ -9468,6 +10427,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_usage_export_bucket_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_usage_export_bucket + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_usage_export_bucket + ] = mock_rpc + + request = {} + client.set_usage_export_bucket_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_usage_export_bucket_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_usage_export_bucket_unary_rest_required_fields( request_type=compute.SetUsageExportBucketProjectRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py index 8b456b63a2e2..2b2a18e2cdff 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py @@ -1093,6 +1093,46 @@ def test_announce_rest(request_type): assert response.zone == "zone_value" +def test_announce_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.announce in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.announce] = mock_rpc + + request = {} + client.announce(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.announce(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_announce_rest_required_fields( request_type=compute.AnnouncePublicAdvertisedPrefixeRequest, ): @@ -1396,6 +1436,46 @@ def test_announce_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_announce_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.announce in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.announce] = mock_rpc + + request = {} + client.announce_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.announce_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_announce_unary_rest_required_fields( request_type=compute.AnnouncePublicAdvertisedPrefixeRequest, ): @@ -1721,6 +1801,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeletePublicAdvertisedPrefixeRequest, ): @@ -2024,6 +2144,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeletePublicAdvertisedPrefixeRequest, ): @@ -2331,6 +2491,42 @@ def test_get_rest(request_type): assert response.status == "status_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields( request_type=compute.GetPublicAdvertisedPrefixeRequest, ): @@ -2755,6 +2951,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertPublicAdvertisedPrefixeRequest, ): @@ -3156,6 +3392,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertPublicAdvertisedPrefixeRequest, ): @@ -3444,6 +3720,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListPublicAdvertisedPrefixesRequest, ): @@ -3930,6 +4242,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields( request_type=compute.PatchPublicAdvertisedPrefixeRequest, ): @@ -4340,6 +4692,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchPublicAdvertisedPrefixeRequest, ): @@ -4673,6 +5065,46 @@ def test_withdraw_rest(request_type): assert response.zone == "zone_value" +def test_withdraw_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.withdraw in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.withdraw] = mock_rpc + + request = {} + client.withdraw(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.withdraw(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_withdraw_rest_required_fields( request_type=compute.WithdrawPublicAdvertisedPrefixeRequest, ): @@ -4976,6 +5408,46 @@ def test_withdraw_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_withdraw_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.withdraw in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.withdraw] = mock_rpc + + request = {} + client.withdraw_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.withdraw_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_withdraw_unary_rest_required_fields( request_type=compute.WithdrawPublicAdvertisedPrefixeRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py index 476235d988d9..669074cf99ad 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py @@ -1055,6 +1055,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListPublicDelegatedPrefixesRequest, ): @@ -1463,6 +1499,46 @@ def test_announce_rest(request_type): assert response.zone == "zone_value" +def test_announce_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.announce in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.announce] = mock_rpc + + request = {} + client.announce(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.announce(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_announce_rest_required_fields( request_type=compute.AnnouncePublicDelegatedPrefixeRequest, ): @@ -1783,6 +1859,46 @@ def test_announce_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_announce_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.announce in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.announce] = mock_rpc + + request = {} + client.announce_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.announce_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_announce_unary_rest_required_fields( request_type=compute.AnnouncePublicDelegatedPrefixeRequest, ): @@ -2125,6 +2241,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeletePublicDelegatedPrefixeRequest, ): @@ -2445,6 +2601,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeletePublicDelegatedPrefixeRequest, ): @@ -2726,6 +2922,7 @@ def test_get_rest(request_type): with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = compute.PublicDelegatedPrefix( + allocatable_prefix_length=2626, byoip_api_version="byoip_api_version_value", creation_timestamp="creation_timestamp_value", description="description_value", @@ -2734,6 +2931,7 @@ def test_get_rest(request_type): ip_cidr_range="ip_cidr_range_value", is_live_migration=True, kind="kind_value", + mode="mode_value", name="name_value", parent_prefix="parent_prefix_value", region="region_value", @@ -2754,6 +2952,7 @@ def test_get_rest(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, compute.PublicDelegatedPrefix) + assert response.allocatable_prefix_length == 2626 assert response.byoip_api_version == "byoip_api_version_value" assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" @@ -2762,6 +2961,7 @@ def test_get_rest(request_type): assert response.ip_cidr_range == "ip_cidr_range_value" assert response.is_live_migration is True assert response.kind == "kind_value" + assert response.mode == "mode_value" assert response.name == "name_value" assert response.parent_prefix == "parent_prefix_value" assert response.region == "region_value" @@ -2769,6 +2969,42 @@ def test_get_rest(request_type): assert response.status == "status_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields( request_type=compute.GetPublicDelegatedPrefixeRequest, ): @@ -3041,6 +3277,7 @@ def test_insert_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} request_init["public_delegated_prefix_resource"] = { + "allocatable_prefix_length": 2626, "byoip_api_version": "byoip_api_version_value", "creation_timestamp": "creation_timestamp_value", "description": "description_value", @@ -3049,14 +3286,17 @@ def test_insert_rest(request_type): "ip_cidr_range": "ip_cidr_range_value", "is_live_migration": True, "kind": "kind_value", + "mode": "mode_value", "name": "name_value", "parent_prefix": "parent_prefix_value", "public_delegated_sub_prefixs": [ { + "allocatable_prefix_length": 2626, "delegatee_project": "delegatee_project_value", "description": "description_value", "ip_cidr_range": "ip_cidr_range_value", "is_address": True, + "mode": "mode_value", "name": "name_value", "region": "region_value", "status": "status_value", @@ -3208,6 +3448,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + 
mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertPublicDelegatedPrefixeRequest, ): @@ -3405,7 +3685,7 @@ def test_insert_rest_flattened(): project="project_value", region="region_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - byoip_api_version="byoip_api_version_value" + allocatable_prefix_length=2626 ), ) mock_args.update(sample_request) @@ -3446,7 +3726,7 @@ def test_insert_rest_flattened_error(transport: str = "rest"): project="project_value", region="region_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - byoip_api_version="byoip_api_version_value" + allocatable_prefix_length=2626 ), ) @@ -3473,6 +3753,7 @@ def test_insert_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} request_init["public_delegated_prefix_resource"] = { + "allocatable_prefix_length": 2626, "byoip_api_version": "byoip_api_version_value", "creation_timestamp": "creation_timestamp_value", "description": "description_value", @@ -3481,14 +3762,17 @@ def test_insert_unary_rest(request_type): "ip_cidr_range": "ip_cidr_range_value", "is_live_migration": True, "kind": "kind_value", + "mode": "mode_value", "name": "name_value", "parent_prefix": "parent_prefix_value", "public_delegated_sub_prefixs": [ { + 
"allocatable_prefix_length": 2626, "delegatee_project": "delegatee_project_value", "description": "description_value", "ip_cidr_range": "ip_cidr_range_value", "is_address": True, + "mode": "mode_value", "name": "name_value", "region": "region_value", "status": "status_value", @@ -3618,6 +3902,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertPublicDelegatedPrefixeRequest, ): @@ -3815,7 +4139,7 @@ def test_insert_unary_rest_flattened(): project="project_value", region="region_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - byoip_api_version="byoip_api_version_value" + allocatable_prefix_length=2626 ), ) mock_args.update(sample_request) @@ -3856,7 +4180,7 @@ def test_insert_unary_rest_flattened_error(transport: str = "rest"): project="project_value", region="region_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - byoip_api_version="byoip_api_version_value" + allocatable_prefix_length=2626 ), ) @@ -3913,6 +4237,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListPublicDelegatedPrefixesRequest, ): @@ -4247,6 +4607,7 @@ def test_patch_rest(request_type): "public_delegated_prefix": "sample3", } request_init["public_delegated_prefix_resource"] = { + "allocatable_prefix_length": 2626, "byoip_api_version": "byoip_api_version_value", "creation_timestamp": "creation_timestamp_value", "description": "description_value", @@ -4255,14 +4616,17 @@ def test_patch_rest(request_type): "ip_cidr_range": "ip_cidr_range_value", "is_live_migration": True, "kind": "kind_value", + "mode": "mode_value", "name": "name_value", "parent_prefix": "parent_prefix_value", "public_delegated_sub_prefixs": [ { + "allocatable_prefix_length": 2626, "delegatee_project": "delegatee_project_value", "description": "description_value", "ip_cidr_range": "ip_cidr_range_value", "is_address": True, + "mode": "mode_value", "name": "name_value", "region": "region_value", "status": "status_value", @@ -4414,6 +4778,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in 
client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields( request_type=compute.PatchPublicDelegatedPrefixeRequest, ): @@ -4625,7 +5029,7 @@ def test_patch_rest_flattened(): region="region_value", public_delegated_prefix="public_delegated_prefix_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - byoip_api_version="byoip_api_version_value" + allocatable_prefix_length=2626 ), ) mock_args.update(sample_request) @@ -4667,7 +5071,7 @@ def test_patch_rest_flattened_error(transport: str = "rest"): region="region_value", public_delegated_prefix="public_delegated_prefix_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - byoip_api_version="byoip_api_version_value" + allocatable_prefix_length=2626 ), ) @@ -4698,6 +5102,7 @@ def test_patch_unary_rest(request_type): "public_delegated_prefix": "sample3", } request_init["public_delegated_prefix_resource"] = { + "allocatable_prefix_length": 2626, "byoip_api_version": "byoip_api_version_value", "creation_timestamp": "creation_timestamp_value", "description": "description_value", @@ -4706,14 +5111,17 @@ def test_patch_unary_rest(request_type): "ip_cidr_range": "ip_cidr_range_value", "is_live_migration": True, "kind": "kind_value", + "mode": "mode_value", "name": "name_value", 
"parent_prefix": "parent_prefix_value", "public_delegated_sub_prefixs": [ { + "allocatable_prefix_length": 2626, "delegatee_project": "delegatee_project_value", "description": "description_value", "ip_cidr_range": "ip_cidr_range_value", "is_address": True, + "mode": "mode_value", "name": "name_value", "region": "region_value", "status": "status_value", @@ -4843,6 +5251,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchPublicDelegatedPrefixeRequest, ): @@ -5054,7 +5502,7 @@ def test_patch_unary_rest_flattened(): region="region_value", public_delegated_prefix="public_delegated_prefix_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - byoip_api_version="byoip_api_version_value" + allocatable_prefix_length=2626 ), ) mock_args.update(sample_request) @@ -5096,7 +5544,7 @@ def test_patch_unary_rest_flattened_error(transport: str = "rest"): region="region_value", public_delegated_prefix="public_delegated_prefix_value", public_delegated_prefix_resource=compute.PublicDelegatedPrefix( - byoip_api_version="byoip_api_version_value" + allocatable_prefix_length=2626 ), ) @@ -5193,6 +5641,46 @@ def test_withdraw_rest(request_type): assert response.zone == "zone_value" +def test_withdraw_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.withdraw in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.withdraw] = mock_rpc + + request = {} + client.withdraw(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.withdraw(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_withdraw_rest_required_fields( request_type=compute.WithdrawPublicDelegatedPrefixeRequest, ): @@ -5513,6 +6001,46 @@ def test_withdraw_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_withdraw_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.withdraw in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.withdraw] = mock_rpc + + request = {} + client.withdraw_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.withdraw_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_withdraw_unary_rest_required_fields( request_type=compute.WithdrawPublicDelegatedPrefixeRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_autoscalers.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_autoscalers.py index 318f1e006008..ac93b866310e 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_autoscalers.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_autoscalers.py @@ -1067,6 +1067,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteRegionAutoscalerRequest, ): @@ -1379,6 +1419,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteRegionAutoscalerRequest, ): @@ -1691,6 +1771,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetRegionAutoscalerRequest): transport_class = transports.RegionAutoscalersRestTransport @@ -2134,6 +2250,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertRegionAutoscalerRequest, ): @@ -2556,6 +2712,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertRegionAutoscalerRequest, ): @@ -2851,6 +3047,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListRegionAutoscalersRequest): transport_class = transports.RegionAutoscalersRestTransport @@ -3356,6 +3588,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchRegionAutoscalerRequest): transport_class = transports.RegionAutoscalersRestTransport @@ -3784,6 +4056,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchRegionAutoscalerRequest, ): @@ -4238,6 +4550,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_update_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_rest_required_fields( request_type=compute.UpdateRegionAutoscalerRequest, ): @@ -4670,6 +5022,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_update_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_unary_rest_required_fields( request_type=compute.UpdateRegionAutoscalerRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_backend_services.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_backend_services.py index 388b66614e9e..098999b1d01a 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_backend_services.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_backend_services.py @@ -1091,6 +1091,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteRegionBackendServiceRequest, ): @@ -1411,6 +1451,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteRegionBackendServiceRequest, ): @@ -1761,6 +1841,42 @@ def test_get_rest(request_type): assert response.timeout_sec == 1185 +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetRegionBackendServiceRequest): transport_class = transports.RegionBackendServicesRestTransport @@ -2135,6 +2251,42 @@ def get_message_fields(field): assert response.kind == "kind_value" +def test_get_health_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_health in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_health] = mock_rpc + + request = {} + client.get_health(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_health(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_health_rest_required_fields( request_type=compute.GetHealthRegionBackendServiceRequest, ): @@ -2443,6 +2595,42 @@ def test_get_iam_policy_rest(request_type): assert response.version == 774 +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=compute.GetIamPolicyRegionBackendServiceRequest, ): @@ -3013,6 +3201,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertRegionBackendServiceRequest, ): @@ -3557,6 +3785,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertRegionBackendServiceRequest, ): @@ -3852,6 +4120,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListRegionBackendServicesRequest, ): @@ -4212,6 +4516,42 @@ def test_list_usable_rest(request_type): assert response.self_link == "self_link_value" +def test_list_usable_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_usable in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_usable] = mock_rpc + + request = {} + client.list_usable(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_usable(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_usable_rest_required_fields( request_type=compute.ListUsableRegionBackendServicesRequest, ): @@ -4847,6 +5187,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields( request_type=compute.PatchRegionBackendServiceRequest, ): @@ -5410,6 +5790,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchRegionBackendServiceRequest, ): @@ -5870,6 +6290,42 @@ def get_message_fields(field): assert response.version == 774 +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=compute.SetIamPolicyRegionBackendServiceRequest, ): @@ -6293,6 +6749,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_security_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_security_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_security_policy + ] = mock_rpc + + request = {} + client.set_security_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_security_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_security_policy_rest_required_fields( request_type=compute.SetSecurityPolicyRegionBackendServiceRequest, ): @@ -6700,6 +7200,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_security_policy_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_security_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_security_policy + ] = mock_rpc + + request = {} + client.set_security_policy_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_security_policy_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_security_policy_unary_rest_required_fields( request_type=compute.SetSecurityPolicyRegionBackendServiceRequest, ): @@ -7083,6 +7627,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsRegionBackendServiceRequest, ): @@ -7665,6 +8249,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_update_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_rest_required_fields( request_type=compute.UpdateRegionBackendServiceRequest, ): @@ -8228,6 +8852,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_update_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_unary_rest_required_fields( request_type=compute.UpdateRegionBackendServiceRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_commitments.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_commitments.py index c77b6e6a0f41..a31db2b4fc9b 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_commitments.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_commitments.py @@ -1033,6 +1033,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListRegionCommitmentsRequest, ): @@ -1426,6 +1462,42 @@ def test_get_rest(request_type): assert response.type_ == "type__value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetRegionCommitmentRequest): transport_class = transports.RegionCommitmentsRestTransport @@ -1918,6 +1990,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertRegionCommitmentRequest, ): @@ -2385,6 +2497,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertRegionCommitmentRequest, ): @@ -2676,6 +2828,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListRegionCommitmentsRequest): transport_class = transports.RegionCommitmentsRestTransport @@ -3232,6 +3420,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_update_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_rest_required_fields( request_type=compute.UpdateRegionCommitmentRequest, ): @@ -3722,6 +3950,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_update_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_unary_rest_required_fields( request_type=compute.UpdateRegionCommitmentRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disk_types.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disk_types.py index 6570d53860aa..2fd60f78f9bc 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disk_types.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disk_types.py @@ -1022,6 +1022,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetRegionDiskTypeRequest): transport_class = transports.RegionDiskTypesRestTransport @@ -1316,6 +1352,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListRegionDiskTypesRequest): transport_class = transports.RegionDiskTypesRestTransport diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disks.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disks.py index 8ac5e0fd5298..dfabd121caac 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disks.py @@ -1103,6 +1103,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_add_resource_policies_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.add_resource_policies + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.add_resource_policies + ] = mock_rpc + + request = {} + client.add_resource_policies(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_resource_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_resource_policies_rest_required_fields( request_type=compute.AddResourcePoliciesRegionDiskRequest, ): @@ -1504,6 +1549,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_add_resource_policies_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.add_resource_policies + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.add_resource_policies + ] = mock_rpc + + request = {} + client.add_resource_policies_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_resource_policies_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_resource_policies_unary_rest_required_fields( request_type=compute.AddResourcePoliciesRegionDiskRequest, ): @@ -1920,6 +2010,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_bulk_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.bulk_insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.bulk_insert] = mock_rpc + + request = {} + client.bulk_insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.bulk_insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_bulk_insert_rest_required_fields( request_type=compute.BulkInsertRegionDiskRequest, ): @@ -2307,6 +2437,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_bulk_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.bulk_insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.bulk_insert] = mock_rpc + + request = {} + client.bulk_insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.bulk_insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_bulk_insert_unary_rest_required_fields( request_type=compute.BulkInsertRegionDiskRequest, ): @@ -2751,6 +2921,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_create_snapshot_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_snapshot] = mock_rpc + + request = {} + client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_create_snapshot_rest_required_fields( request_type=compute.CreateSnapshotRegionDiskRequest, ): @@ -3176,6 +3386,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_create_snapshot_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_snapshot] = mock_rpc + + request = {} + client.create_snapshot_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_snapshot_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_create_snapshot_unary_rest_required_fields( request_type=compute.CreateSnapshotRegionDiskRequest, ): @@ -3510,6 +3760,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteRegionDiskRequest): transport_class = transports.RegionDisksRestTransport @@ -3816,6 +4106,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteRegionDiskRequest, ): @@ -4121,6 +4451,7 @@ def test_get_rest(request_type): source_snapshot_id="source_snapshot_id_value", source_storage_object="source_storage_object_value", status="status_value", + storage_pool="storage_pool_value", type_="type__value", users=["users_value"], zone="zone_value", @@ -4181,11 +4512,48 @@ def test_get_rest(request_type): assert response.source_snapshot_id == "source_snapshot_id_value" assert response.source_storage_object == "source_storage_object_value" assert response.status == "status_value" + assert response.storage_pool == "storage_pool_value" assert response.type_ == "type__value" assert response.users == ["users_value"] assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetRegionDiskRequest): transport_class = transports.RegionDisksRestTransport @@ -4472,6 +4840,42 @@ def test_get_iam_policy_rest(request_type): assert response.version == 774 +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=compute.GetIamPolicyRegionDiskRequest, ): @@ -4799,6 +5203,7 @@ def test_insert_rest(request_type): "source_snapshot_id": "source_snapshot_id_value", "source_storage_object": "source_storage_object_value", "status": "status_value", + "storage_pool": "storage_pool_value", "type_": "type__value", "users": ["users_value1", "users_value2"], "zone": "zone_value", @@ -4937,6 +5342,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertRegionDiskRequest): transport_class = transports.RegionDisksRestTransport @@ -5265,6 +5710,7 @@ def test_insert_unary_rest(request_type): "source_snapshot_id": "source_snapshot_id_value", "source_storage_object": "source_storage_object_value", "status": "status_value", + "storage_pool": "storage_pool_value", "type_": "type__value", "users": ["users_value1", "users_value2"], "zone": "zone_value", @@ -5381,6 +5827,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertRegionDiskRequest, ): @@ -5682,6 +6168,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListRegionDisksRequest): transport_class = transports.RegionDisksRestTransport @@ -6157,6 +6679,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_remove_resource_policies_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.remove_resource_policies + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.remove_resource_policies + ] = mock_rpc + + request = {} + client.remove_resource_policies(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_resource_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_resource_policies_rest_required_fields( request_type=compute.RemoveResourcePoliciesRegionDiskRequest, ): @@ -6559,6 +7126,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_remove_resource_policies_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.remove_resource_policies + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.remove_resource_policies + ] = mock_rpc + + request = {} + client.remove_resource_policies_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_resource_policies_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_resource_policies_unary_rest_required_fields( request_type=compute.RemoveResourcePoliciesRegionDiskRequest, ): @@ -6976,6 +7588,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_resize_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.resize in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.resize] = mock_rpc + + request = {} + client.resize(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.resize(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_resize_rest_required_fields(request_type=compute.ResizeRegionDiskRequest): transport_class = transports.RegionDisksRestTransport @@ -7368,6 +8020,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_resize_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.resize in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.resize] = mock_rpc + + request = {} + client.resize_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.resize_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_resize_unary_rest_required_fields( request_type=compute.ResizeRegionDiskRequest, ): @@ -7820,6 +8512,42 @@ def get_message_fields(field): assert response.version == 774 +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=compute.SetIamPolicyRegionDiskRequest, ): @@ -8239,6 +8967,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_labels_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_rest_required_fields( request_type=compute.SetLabelsRegionDiskRequest, ): @@ -8638,6 +9406,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_labels_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_unary_rest_required_fields( request_type=compute.SetLabelsRegionDiskRequest, ): @@ -9065,6 +9873,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_start_async_replication_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.start_async_replication + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.start_async_replication + ] = mock_rpc + + request = {} + client.start_async_replication(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.start_async_replication(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_start_async_replication_rest_required_fields( request_type=compute.StartAsyncReplicationRegionDiskRequest, ): @@ -9466,6 +10319,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_start_async_replication_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.start_async_replication + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.start_async_replication + ] = mock_rpc + + request = {} + client.start_async_replication_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.start_async_replication_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_start_async_replication_unary_rest_required_fields( request_type=compute.StartAsyncReplicationRegionDiskRequest, ): @@ -9804,6 +10702,51 @@ def test_stop_async_replication_rest(request_type): assert response.zone == "zone_value" +def test_stop_async_replication_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.stop_async_replication + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.stop_async_replication + ] = mock_rpc + + request = {} + client.stop_async_replication(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.stop_async_replication(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_stop_async_replication_rest_required_fields( request_type=compute.StopAsyncReplicationRegionDiskRequest, ): @@ -10112,6 +11055,51 @@ def test_stop_async_replication_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_stop_async_replication_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.stop_async_replication + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.stop_async_replication + ] = mock_rpc + + request = {} + client.stop_async_replication_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.stop_async_replication_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_stop_async_replication_unary_rest_required_fields( request_type=compute.StopAsyncReplicationRegionDiskRequest, ): @@ -10527,6 +11515,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_stop_group_async_replication_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.stop_group_async_replication + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.stop_group_async_replication + ] = mock_rpc + + request = {} + client.stop_group_async_replication(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.stop_group_async_replication(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_stop_group_async_replication_rest_required_fields( request_type=compute.StopGroupAsyncReplicationRegionDiskRequest, ): @@ -10922,6 +11955,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_stop_group_async_replication_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.stop_group_async_replication + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.stop_group_async_replication + ] = mock_rpc + + request = {} + client.stop_group_async_replication_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.stop_group_async_replication_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_stop_group_async_replication_unary_rest_required_fields( request_type=compute.StopGroupAsyncReplicationRegionDiskRequest, ): @@ -11292,6 +12370,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsRegionDiskRequest, ): @@ -11627,6 +12745,7 @@ def test_update_rest(request_type): "source_snapshot_id": "source_snapshot_id_value", "source_storage_object": "source_storage_object_value", "status": "status_value", + "storage_pool": "storage_pool_value", "type_": "type__value", "users": ["users_value1", "users_value2"], "zone": "zone_value", @@ -11765,6 +12884,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_update_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_rest_required_fields(request_type=compute.UpdateRegionDiskRequest): transport_class = transports.RegionDisksRestTransport @@ -12102,6 +13261,7 @@ def test_update_unary_rest(request_type): "source_snapshot_id": "source_snapshot_id_value", "source_storage_object": "source_storage_object_value", "status": "status_value", + "storage_pool": "storage_pool_value", "type_": "type__value", "users": ["users_value1", "users_value2"], "zone": "zone_value", @@ -12218,6 +13378,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_update_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_unary_rest_required_fields( request_type=compute.UpdateRegionDiskRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_check_services.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_check_services.py index bdb4b0dd11d2..8ffc04803da3 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_check_services.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_check_services.py @@ -1103,6 +1103,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteRegionHealthCheckServiceRequest, ): @@ -1423,6 +1463,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteRegionHealthCheckServiceRequest, ): @@ -1748,6 +1828,42 @@ def test_get_rest(request_type): assert response.self_link == "self_link_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields( request_type=compute.GetRegionHealthCheckServiceRequest, ): @@ -2181,6 +2297,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertRegionHealthCheckServiceRequest, ): @@ -2585,6 +2741,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertRegionHealthCheckServiceRequest, ): @@ -2880,6 +3076,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListRegionHealthCheckServicesRequest, ): @@ -3375,6 +3607,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields( request_type=compute.PatchRegionHealthCheckServiceRequest, ): @@ -3798,6 +4070,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchRegionHealthCheckServiceRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_checks.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_checks.py index 1711b1e9f026..8dee405e1f1b 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_checks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_checks.py @@ -1075,6 +1075,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteRegionHealthCheckRequest, ): @@ -1395,6 +1435,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteRegionHealthCheckRequest, ): @@ -1717,6 +1797,42 @@ def test_get_rest(request_type): assert response.unhealthy_threshold == 2046 +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetRegionHealthCheckRequest): transport_class = transports.RegionHealthChecksRestTransport @@ -2186,6 +2302,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertRegionHealthCheckRequest, ): @@ -2626,6 +2782,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertRegionHealthCheckRequest, ): @@ -2917,6 +3113,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListRegionHealthChecksRequest): transport_class = transports.RegionHealthChecksRestTransport @@ -3450,6 +3682,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchRegionHealthCheckRequest): transport_class = transports.RegionHealthChecksRestTransport @@ -3907,6 +4179,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchRegionHealthCheckRequest, ): @@ -4388,6 +4700,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_update_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_rest_required_fields( request_type=compute.UpdateRegionHealthCheckRequest, ): @@ -4847,6 +5199,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_update_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_unary_rest_required_fields( request_type=compute.UpdateRegionHealthCheckRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py index 2e9e9cbf02f5..6f460403ecdd 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py @@ -1192,6 +1192,48 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_abandon_instances_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.abandon_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.abandon_instances + ] = mock_rpc + + request = {} + client.abandon_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.abandon_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_abandon_instances_rest_required_fields( request_type=compute.AbandonInstancesRegionInstanceGroupManagerRequest, ): @@ -1606,6 +1648,48 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_abandon_instances_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.abandon_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.abandon_instances + ] = mock_rpc + + request = {} + client.abandon_instances_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.abandon_instances_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_abandon_instances_unary_rest_required_fields( request_type=compute.AbandonInstancesRegionInstanceGroupManagerRequest, ): @@ -2047,6 +2131,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_apply_updates_to_instances_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.apply_updates_to_instances + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.apply_updates_to_instances + ] = mock_rpc + + request = {} + client.apply_updates_to_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.apply_updates_to_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_apply_updates_to_instances_rest_required_fields( request_type=compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest, ): @@ -2468,6 +2597,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_apply_updates_to_instances_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.apply_updates_to_instances + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.apply_updates_to_instances + ] = mock_rpc + + request = {} + client.apply_updates_to_instances_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.apply_updates_to_instances_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_apply_updates_to_instances_unary_rest_required_fields( request_type=compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest, ): @@ -2918,6 +3092,48 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_create_instances_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_instances + ] = mock_rpc + + request = {} + client.create_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_create_instances_rest_required_fields( request_type=compute.CreateInstancesRegionInstanceGroupManagerRequest, ): @@ -3344,6 +3560,48 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_create_instances_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_instances + ] = mock_rpc + + request = {} + client.create_instances_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instances_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_create_instances_unary_rest_required_fields( request_type=compute.CreateInstancesRegionInstanceGroupManagerRequest, ): @@ -3695,6 +3953,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteRegionInstanceGroupManagerRequest, ): @@ -4016,6 +4314,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteRegionInstanceGroupManagerRequest, ): @@ -4445,6 +4783,48 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_delete_instances_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_instances + ] = mock_rpc + + request = {} + client.delete_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_instances_rest_required_fields( request_type=compute.DeleteInstancesRegionInstanceGroupManagerRequest, ): @@ -4860,6 +5240,48 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_delete_instances_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_instances + ] = mock_rpc + + request = {} + client.delete_instances_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_instances_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_instances_unary_rest_required_fields( request_type=compute.DeleteInstancesRegionInstanceGroupManagerRequest, ): @@ -5298,6 +5720,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_delete_per_instance_configs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_per_instance_configs + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_per_instance_configs + ] = mock_rpc + + request = {} + client.delete_per_instance_configs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_per_instance_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_per_instance_configs_rest_required_fields( request_type=compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest, ): @@ -5716,6 +6183,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_delete_per_instance_configs_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_per_instance_configs + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_per_instance_configs + ] = mock_rpc + + request = {} + client.delete_per_instance_configs_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_per_instance_configs_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_per_instance_configs_unary_rest_required_fields( request_type=compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest, ): @@ -6060,6 +6572,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields( request_type=compute.GetRegionInstanceGroupManagerRequest, ): @@ -6550,6 +7098,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertRegionInstanceGroupManagerRequest, ): @@ -7020,6 +7608,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertRegionInstanceGroupManagerRequest, ): @@ -7324,6 +7952,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListRegionInstanceGroupManagersRequest, ): @@ -7686,6 +8350,42 @@ def test_list_errors_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_errors_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_errors in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_errors] = mock_rpc + + request = {} + client.list_errors(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_errors(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_errors_rest_required_fields( request_type=compute.ListErrorsRegionInstanceGroupManagersRequest, ): @@ -8075,6 +8775,47 @@ def test_list_managed_instances_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_managed_instances_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_managed_instances + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_managed_instances + ] = mock_rpc + + request = {} + client.list_managed_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_managed_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_managed_instances_rest_required_fields( request_type=compute.ListManagedInstancesRegionInstanceGroupManagersRequest, ): @@ -8455,15 +9196,56 @@ def test_list_per_instance_configs_rest(request_type): return_value = compute.RegionInstanceGroupManagersListInstanceConfigsResp.pb( return_value ) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_per_instance_configs(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPerInstanceConfigsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_per_instance_configs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_per_instance_configs + in client._transport._wrapped_methods + ) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_per_instance_configs(request) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_per_instance_configs + ] = mock_rpc - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListPerInstanceConfigsPager) - assert response.next_page_token == "next_page_token_value" + request = {} + client.list_per_instance_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_per_instance_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 def test_list_per_instance_configs_rest_required_fields( @@ -9053,6 +9835,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields( request_type=compute.PatchRegionInstanceGroupManagerRequest, ): @@ -9541,6 +10363,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchRegionInstanceGroupManagerRequest, ): @@ -9998,6 +10860,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_per_instance_configs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.patch_per_instance_configs + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.patch_per_instance_configs + ] = mock_rpc + + request = {} + client.patch_per_instance_configs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_per_instance_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_per_instance_configs_rest_required_fields( request_type=compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest, ): @@ -10434,6 +11341,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_per_instance_configs_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.patch_per_instance_configs + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.patch_per_instance_configs + ] = mock_rpc + + request = {} + client.patch_per_instance_configs_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_per_instance_configs_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_per_instance_configs_unary_rest_required_fields( request_type=compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest, ): @@ -10878,6 +11830,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_recreate_instances_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.recreate_instances in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.recreate_instances + ] = mock_rpc + + request = {} + client.recreate_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.recreate_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_recreate_instances_rest_required_fields( request_type=compute.RecreateInstancesRegionInstanceGroupManagerRequest, ): @@ -11292,6 +12288,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_recreate_instances_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.recreate_instances in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.recreate_instances + ] = mock_rpc + + request = {} + client.recreate_instances_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.recreate_instances_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_recreate_instances_unary_rest_required_fields( request_type=compute.RecreateInstancesRegionInstanceGroupManagerRequest, ): @@ -11643,6 +12683,46 @@ def test_resize_rest(request_type): assert response.zone == "zone_value" +def test_resize_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.resize in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.resize] = mock_rpc + + request = {} + client.resize(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.resize(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_resize_rest_required_fields( request_type=compute.ResizeRegionInstanceGroupManagerRequest, ): @@ -11989,6 +13069,46 @@ def test_resize_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_resize_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.resize in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.resize] = mock_rpc + + request = {} + client.resize_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.resize_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_resize_unary_rest_required_fields( request_type=compute.ResizeRegionInstanceGroupManagerRequest, ): @@ -12444,6 +13564,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_instance_template_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_instance_template + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_instance_template + ] = mock_rpc + + request = {} + client.set_instance_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_instance_template(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_instance_template_rest_required_fields( request_type=compute.SetInstanceTemplateRegionInstanceGroupManagerRequest, ): @@ -12862,6 +14027,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_instance_template_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_instance_template + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_instance_template + ] = mock_rpc + + request = {} + client.set_instance_template_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_instance_template_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_instance_template_unary_rest_required_fields( request_type=compute.SetInstanceTemplateRegionInstanceGroupManagerRequest, ): @@ -13301,6 +14511,48 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_target_pools_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_target_pools in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_target_pools + ] = mock_rpc + + request = {} + client.set_target_pools(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_target_pools(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_target_pools_rest_required_fields( request_type=compute.SetTargetPoolsRegionInstanceGroupManagerRequest, ): @@ -13716,6 +14968,48 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_target_pools_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_target_pools in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_target_pools + ] = mock_rpc + + request = {} + client.set_target_pools_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_target_pools_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_target_pools_unary_rest_required_fields( request_type=compute.SetTargetPoolsRegionInstanceGroupManagerRequest, ): @@ -14168,6 +15462,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_update_per_instance_configs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_per_instance_configs + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_per_instance_configs + ] = mock_rpc + + request = {} + client.update_per_instance_configs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_per_instance_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_per_instance_configs_rest_required_fields( request_type=compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest, ): @@ -14606,6 +15945,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_update_per_instance_configs_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_per_instance_configs + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_per_instance_configs + ] = mock_rpc + + request = {} + client.update_per_instance_configs_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_per_instance_configs_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_per_instance_configs_unary_rest_required_fields( request_type=compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_groups.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_groups.py index d3c29e41bc1f..c42b76d75341 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_groups.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_groups.py @@ -1069,6 +1069,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetRegionInstanceGroupRequest): transport_class = transports.RegionInstanceGroupsRestTransport @@ -1369,6 +1405,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListRegionInstanceGroupsRequest, ): @@ -1819,6 +1891,42 @@ def get_message_fields(field): assert response.self_link == "self_link_value" +def test_list_instances_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc + + request = {} + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_instances_rest_required_fields( request_type=compute.ListInstancesRegionInstanceGroupsRequest, ): @@ -2340,6 +2448,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_named_ports_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_named_ports in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_named_ports] = mock_rpc + + request = {} + client.set_named_ports(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_named_ports(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_named_ports_rest_required_fields( request_type=compute.SetNamedPortsRegionInstanceGroupRequest, ): @@ -2755,6 +2903,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_named_ports_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_named_ports in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_named_ports] = mock_rpc + + request = {} + client.set_named_ports_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_named_ports_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_named_ports_unary_rest_required_fields( request_type=compute.SetNamedPortsRegionInstanceGroupRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_templates.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_templates.py index 51158ae22c59..4ee009edbf21 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_templates.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_templates.py @@ -1093,6 +1093,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteRegionInstanceTemplateRequest, ): @@ -1413,6 +1453,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteRegionInstanceTemplateRequest, ): @@ -1727,6 +1807,42 @@ def test_get_rest(request_type): assert response.source_instance == "source_instance_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields( request_type=compute.GetRegionInstanceTemplateRequest, ): @@ -2056,6 +2172,7 @@ def test_insert_rest(request_type): "source_image_encryption_key": {}, "source_snapshot": "source_snapshot_value", "source_snapshot_encryption_key": {}, + "storage_pool": "storage_pool_value", }, "interface": "interface_value", "kind": "kind_value", @@ -2325,6 +2442,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertRegionInstanceTemplateRequest, ): @@ -2647,6 +2804,7 @@ def test_insert_unary_rest(request_type): "source_image_encryption_key": {}, "source_snapshot": "source_snapshot_value", "source_snapshot_encryption_key": {}, + "storage_pool": "storage_pool_value", }, "interface": "interface_value", "kind": "kind_value", @@ -2894,6 +3052,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertRegionInstanceTemplateRequest, ): @@ -3189,6 +3387,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListRegionInstanceTemplatesRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instances.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instances.py index 3cbde50d066b..0a395d13ee37 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instances.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instances.py @@ -1039,6 +1039,7 @@ def test_bulk_insert_rest(request_type): "source_image_encryption_key": {}, "source_snapshot": "source_snapshot_value", "source_snapshot_encryption_key": {}, + "storage_pool": "storage_pool_value", }, "interface": "interface_value", "kind": "kind_value", @@ -1305,6 +1306,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_bulk_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.bulk_insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.bulk_insert] = mock_rpc + + request = {} + client.bulk_insert(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.bulk_insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_bulk_insert_rest_required_fields( request_type=compute.BulkInsertRegionInstanceRequest, ): @@ -1623,6 +1664,7 @@ def test_bulk_insert_unary_rest(request_type): "source_image_encryption_key": {}, "source_snapshot": "source_snapshot_value", "source_snapshot_encryption_key": {}, + "storage_pool": "storage_pool_value", }, "interface": "interface_value", "kind": "kind_value", @@ -1867,6 +1909,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_bulk_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.bulk_insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.bulk_insert] = mock_rpc + + request = {} + client.bulk_insert_unary(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.bulk_insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_bulk_insert_unary_rest_required_fields( request_type=compute.BulkInsertRegionInstanceRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instant_snapshots.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instant_snapshots.py index 05b83e0cf82d..03fca5c8ef9e 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instant_snapshots.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instant_snapshots.py @@ -1093,6 +1093,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstantSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteRegionInstantSnapshotRequest, ): @@ -1413,6 +1453,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstantSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteRegionInstantSnapshotRequest, ): @@ -1745,6 +1825,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstantSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetRegionInstantSnapshotRequest): transport_class = transports.RegionInstantSnapshotsRestTransport @@ -2043,6 +2159,42 @@ def test_get_iam_policy_rest(request_type): assert response.version == 774 +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstantSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=compute.GetIamPolicyRegionInstantSnapshotRequest, ): @@ -2472,6 +2624,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstantSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertRegionInstantSnapshotRequest, ): @@ -2875,6 +3067,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstantSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertRegionInstantSnapshotRequest, ): @@ -3170,6 +3402,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstantSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListRegionInstantSnapshotsRequest, ): @@ -3680,6 +3948,42 @@ def get_message_fields(field): assert response.version == 774 +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstantSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=compute.SetIamPolicyRegionInstantSnapshotRequest, ): @@ -4100,6 +4404,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_labels_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstantSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_rest_required_fields( request_type=compute.SetLabelsRegionInstantSnapshotRequest, ): @@ -4499,6 +4843,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_labels_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstantSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_unary_rest_required_fields( request_type=compute.SetLabelsRegionInstantSnapshotRequest, ): @@ -4877,6 +5261,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstantSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsRegionInstantSnapshotRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py index b139571e817b..868a95c07b8e 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py @@ -1202,6 +1202,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_attach_network_endpoints_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.attach_network_endpoints + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.attach_network_endpoints + ] = mock_rpc + + request = {} + client.attach_network_endpoints(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.attach_network_endpoints(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_attach_network_endpoints_rest_required_fields( request_type=compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest, ): @@ -1632,6 +1677,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_attach_network_endpoints_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.attach_network_endpoints + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.attach_network_endpoints + ] = mock_rpc + + request = {} + client.attach_network_endpoints_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.attach_network_endpoints_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_attach_network_endpoints_unary_rest_required_fields( request_type=compute.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest, ): @@ -1989,6 +2079,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteRegionNetworkEndpointGroupRequest, ): @@ -2310,6 +2440,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteRegionNetworkEndpointGroupRequest, ): @@ -2748,6 +2918,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_detach_network_endpoints_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.detach_network_endpoints + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.detach_network_endpoints + ] = mock_rpc + + request = {} + client.detach_network_endpoints(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.detach_network_endpoints(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_detach_network_endpoints_rest_required_fields( request_type=compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest, ): @@ -3178,6 +3393,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_detach_network_endpoints_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.detach_network_endpoints + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.detach_network_endpoints + ] = mock_rpc + + request = {} + client.detach_network_endpoints_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.detach_network_endpoints_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_detach_network_endpoints_unary_rest_required_fields( request_type=compute.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest, ): @@ -3519,6 +3779,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields( request_type=compute.GetRegionNetworkEndpointGroupRequest, ): @@ -3965,6 +4261,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertRegionNetworkEndpointGroupRequest, ): @@ -4383,6 +4719,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertRegionNetworkEndpointGroupRequest, ): @@ -4679,6 +5055,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListRegionNetworkEndpointGroupsRequest, ): @@ -5043,6 +5455,47 @@ def test_list_network_endpoints_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_network_endpoints_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_network_endpoints + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_network_endpoints + ] = mock_rpc + + request = {} + client.list_network_endpoints(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_network_endpoints(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_network_endpoints_rest_required_fields( request_type=compute.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_firewall_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_firewall_policies.py index eb6c8449c9c3..75c53123b763 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_firewall_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_network_firewall_policies.py @@ -1197,6 +1197,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_add_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_association in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_association] = mock_rpc + + request = {} + client.add_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_association_rest_required_fields( request_type=compute.AddAssociationRegionNetworkFirewallPolicyRequest, ): @@ -1620,6 +1660,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_add_association_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_association in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_association] = mock_rpc + + request = {} + client.add_association_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_association_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_association_unary_rest_required_fields( request_type=compute.AddAssociationRegionNetworkFirewallPolicyRequest, ): @@ -2109,6 +2189,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_add_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_rule] = mock_rpc + + request = {} + client.add_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_rule_rest_required_fields( request_type=compute.AddRuleRegionNetworkFirewallPolicyRequest, ): @@ -2578,6 +2698,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_add_rule_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_rule] = mock_rpc + + request = {} + client.add_rule_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_rule_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_rule_unary_rest_required_fields( request_type=compute.AddRuleRegionNetworkFirewallPolicyRequest, ): @@ -2941,6 +3101,46 @@ def test_clone_rules_rest(request_type): assert response.zone == "zone_value" +def test_clone_rules_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.clone_rules in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.clone_rules] = mock_rpc + + request = {} + client.clone_rules(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.clone_rules(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_clone_rules_rest_required_fields( request_type=compute.CloneRulesRegionNetworkFirewallPolicyRequest, ): @@ -3272,6 +3472,46 @@ def test_clone_rules_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_clone_rules_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.clone_rules in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.clone_rules] = mock_rpc + + request = {} + client.clone_rules_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.clone_rules_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_clone_rules_unary_rest_required_fields( request_type=compute.CloneRulesRegionNetworkFirewallPolicyRequest, ): @@ -3625,6 +3865,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteRegionNetworkFirewallPolicyRequest, ): @@ -3946,6 +4226,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteRegionNetworkFirewallPolicyRequest, ): @@ -4271,6 +4591,42 @@ def test_get_rest(request_type): assert response.short_name == "short_name_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields( request_type=compute.GetRegionNetworkFirewallPolicyRequest, ): @@ -4579,6 +4935,42 @@ def test_get_association_rest(request_type): assert response.short_name == "short_name_value" +def test_get_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_association in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_association] = mock_rpc + + request = {} + client.get_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_association_rest_required_fields( request_type=compute.GetAssociationRegionNetworkFirewallPolicyRequest, ): @@ -4883,6 +5275,47 @@ def test_get_effective_firewalls_rest(request_type): ) +def test_get_effective_firewalls_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_effective_firewalls + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_effective_firewalls + ] = mock_rpc + + request = {} + client.get_effective_firewalls(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_effective_firewalls(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_effective_firewalls_rest_required_fields( request_type=compute.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest, ): @@ -5202,6 +5635,42 @@ def test_get_iam_policy_rest(request_type): assert response.version == 774 +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=compute.GetIamPolicyRegionNetworkFirewallPolicyRequest, ): @@ -5523,6 +5992,42 @@ def test_get_rule_rest(request_type): assert response.tls_inspect is True +def test_get_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_rule] = mock_rpc + + request = {} + client.get_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rule_rest_required_fields( request_type=compute.GetRuleRegionNetworkFirewallPolicyRequest, ): @@ -6023,6 +6528,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertRegionNetworkFirewallPolicyRequest, ): @@ -6500,6 +7045,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertRegionNetworkFirewallPolicyRequest, ): @@ -6802,6 +7387,42 @@ def test_list_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListRegionNetworkFirewallPoliciesRequest, ): @@ -7362,6 +7983,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields( request_type=compute.PatchRegionNetworkFirewallPolicyRequest, ): @@ -7858,6 +8519,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchRegionNetworkFirewallPolicyRequest, ): @@ -8345,6 +9046,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch_rule] = mock_rpc + + request = {} + client.patch_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rule_rest_required_fields( request_type=compute.PatchRuleRegionNetworkFirewallPolicyRequest, ): @@ -8812,6 +9553,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_rule_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch_rule] = mock_rpc + + request = {} + client.patch_rule_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_rule_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rule_unary_rest_required_fields( request_type=compute.PatchRuleRegionNetworkFirewallPolicyRequest, ): @@ -9173,6 +9954,50 @@ def test_remove_association_rest(request_type): assert response.zone == "zone_value" +def test_remove_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.remove_association in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.remove_association + ] = mock_rpc + + request = {} + client.remove_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_association_rest_required_fields( request_type=compute.RemoveAssociationRegionNetworkFirewallPolicyRequest, ): @@ -9506,6 +10331,50 @@ def test_remove_association_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_remove_association_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.remove_association in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.remove_association + ] = mock_rpc + + request = {} + client.remove_association_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_association_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_association_unary_rest_required_fields( request_type=compute.RemoveAssociationRegionNetworkFirewallPolicyRequest, ): @@ -9861,6 +10730,46 @@ def test_remove_rule_rest(request_type): assert response.zone == "zone_value" +def test_remove_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.remove_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.remove_rule] = mock_rpc + + request = {} + client.remove_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_rule_rest_required_fields( request_type=compute.RemoveRuleRegionNetworkFirewallPolicyRequest, ): @@ -10192,6 +11101,46 @@ def test_remove_rule_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_remove_rule_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.remove_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.remove_rule] = mock_rpc + + request = {} + client.remove_rule_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_rule_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_rule_unary_rest_required_fields( request_type=compute.RemoveRuleRegionNetworkFirewallPolicyRequest, ): @@ -10655,6 +11604,42 @@ def get_message_fields(field): assert response.version == 774 +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=compute.SetIamPolicyRegionNetworkFirewallPolicyRequest, ): @@ -11034,6 +12019,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNetworkFirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsRegionNetworkFirewallPolicyRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_notification_endpoints.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_notification_endpoints.py index aace1521428b..d47448b8ac22 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_notification_endpoints.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_notification_endpoints.py @@ -1107,6 +1107,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteRegionNotificationEndpointRequest, ): @@ -1428,6 +1468,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteRegionNotificationEndpointRequest, ): @@ -1741,6 +1821,42 @@ def test_get_rest(request_type): assert response.self_link == "self_link_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields( request_type=compute.GetRegionNotificationEndpointRequest, ): @@ -2170,6 +2286,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertRegionNotificationEndpointRequest, ): @@ -2571,6 +2727,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertRegionNotificationEndpointRequest, ): @@ -2867,6 +3063,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListRegionNotificationEndpointsRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_operations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_operations.py index e043e8118d91..425d368b2aa7 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_operations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_operations.py @@ -1007,6 +1007,42 @@ def test_delete_rest(request_type): assert isinstance(response, compute.DeleteRegionOperationResponse) +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteRegionOperationRequest): transport_class = transports.RegionOperationsRestTransport @@ -1339,6 +1375,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetRegionOperationRequest): transport_class = transports.RegionOperationsRestTransport @@ -1633,6 +1705,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListRegionOperationsRequest): transport_class = transports.RegionOperationsRestTransport @@ -2027,6 +2135,42 @@ def test_wait_rest(request_type): assert response.zone == "zone_value" +def test_wait_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.wait in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.wait] = mock_rpc + + request = {} + client.wait(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.wait(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_wait_rest_required_fields(request_type=compute.WaitRegionOperationRequest): transport_class = transports.RegionOperationsRestTransport diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_security_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_security_policies.py index 57aaaabc5464..5fc37e70d0ce 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_security_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_security_policies.py @@ -1256,6 +1256,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_add_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_rule] = mock_rpc + + request = {} + client.add_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_rule_rest_required_fields( request_type=compute.AddRuleRegionSecurityPolicyRequest, ): @@ -1747,6 +1787,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_add_rule_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_rule] = mock_rpc + + request = {} + client.add_rule_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_rule_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_rule_unary_rest_required_fields( request_type=compute.AddRuleRegionSecurityPolicyRequest, ): @@ -2097,6 +2177,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteRegionSecurityPolicyRequest, ): @@ -2417,6 +2537,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteRegionSecurityPolicyRequest, ): @@ -2735,6 +2895,42 @@ def test_get_rest(request_type): assert response.type_ == "type__value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetRegionSecurityPolicyRequest): transport_class = transports.RegionSecurityPoliciesRestTransport @@ -3041,6 +3237,42 @@ def test_get_rule_rest(request_type): assert response.priority == 898 +def test_get_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_rule] = mock_rpc + + request = {} + client.get_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rule_rest_required_fields( request_type=compute.GetRuleRegionSecurityPolicyRequest, ): @@ -3604,6 +3836,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertRegionSecurityPolicyRequest, ): @@ -4154,6 +4426,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertRegionSecurityPolicyRequest, ): @@ -4465,6 +4777,42 @@ def test_list_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListRegionSecurityPoliciesRequest, ): @@ -5088,6 +5436,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields( request_type=compute.PatchRegionSecurityPolicyRequest, ): @@ -5657,6 +6045,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchRegionSecurityPolicyRequest, ): @@ -6188,6 +6616,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch_rule] = mock_rpc + + request = {} + client.patch_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rule_rest_required_fields( request_type=compute.PatchRuleRegionSecurityPolicyRequest, ): @@ -6691,6 +7159,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_rule_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch_rule] = mock_rpc + + request = {} + client.patch_rule_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_rule_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rule_unary_rest_required_fields( request_type=compute.PatchRuleRegionSecurityPolicyRequest, ): @@ -7053,6 +7561,46 @@ def test_remove_rule_rest(request_type): assert response.zone == "zone_value" +def test_remove_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.remove_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.remove_rule] = mock_rpc + + request = {} + client.remove_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_rule_rest_required_fields( request_type=compute.RemoveRuleRegionSecurityPolicyRequest, ): @@ -7373,6 +7921,46 @@ def test_remove_rule_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_remove_rule_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.remove_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.remove_rule] = mock_rpc + + request = {} + client.remove_rule_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_rule_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_rule_unary_rest_required_fields( request_type=compute.RemoveRuleRegionSecurityPolicyRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_certificates.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_certificates.py index 59c2dab40529..c7a8ce8c13fc 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_certificates.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_certificates.py @@ -1091,6 +1091,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteRegionSslCertificateRequest, ): @@ -1411,6 +1451,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteRegionSslCertificateRequest, ): @@ -1733,6 +1813,42 @@ def test_get_rest(request_type): assert response.type_ == "type__value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetRegionSslCertificateRequest): transport_class = transports.RegionSslCertificatesRestTransport @@ -2166,6 +2282,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertRegionSslCertificateRequest, ): @@ -2572,6 +2728,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertRegionSslCertificateRequest, ): @@ -2867,6 +3063,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListRegionSslCertificatesRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_policies.py index dc672de409a3..1610c6bb0c9e 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_ssl_policies.py @@ -1067,6 +1067,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteRegionSslPolicyRequest): transport_class = transports.RegionSslPoliciesRestTransport @@ -1377,6 +1417,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteRegionSslPolicyRequest, ): @@ -1691,6 +1771,42 @@ def test_get_rest(request_type): assert response.self_link == "self_link_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetRegionSslPolicyRequest): transport_class = transports.RegionSslPoliciesRestTransport @@ -2109,6 +2225,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertRegionSslPolicyRequest): transport_class = transports.RegionSslPoliciesRestTransport @@ -2504,6 +2660,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertRegionSslPolicyRequest, ): @@ -2799,6 +2995,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListRegionSslPoliciesRequest): transport_class = transports.RegionSslPoliciesRestTransport @@ -3151,6 +3383,47 @@ def test_list_available_features_rest(request_type): assert response.features == ["features_value"] +def test_list_available_features_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_available_features + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_available_features + ] = mock_rpc + + request = {} + client.list_available_features(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_available_features(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_available_features_rest_required_fields( request_type=compute.ListAvailableFeaturesRegionSslPoliciesRequest, ): @@ -3585,6 +3858,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchRegionSslPolicyRequest): transport_class = transports.RegionSslPoliciesRestTransport @@ -3991,6 +4304,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchRegionSslPolicyRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_http_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_http_proxies.py index 7db41cb7ab6a..922254e31d59 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_http_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_http_proxies.py @@ -1093,6 +1093,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteRegionTargetHttpProxyRequest, ): @@ -1413,6 +1453,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteRegionTargetHttpProxyRequest, ): @@ -1733,6 +1813,42 @@ def test_get_rest(request_type): assert response.url_map == "url_map_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetRegionTargetHttpProxyRequest): transport_class = transports.RegionTargetHttpProxiesRestTransport @@ -2155,6 +2271,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertRegionTargetHttpProxyRequest, ): @@ -2550,6 +2706,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertRegionTargetHttpProxyRequest, ): @@ -2845,6 +3041,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListRegionTargetHttpProxiesRequest, ): @@ -3319,6 +3551,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_url_map_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_url_map in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_url_map] = mock_rpc + + request = {} + client.set_url_map(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_url_map(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_url_map_rest_required_fields( request_type=compute.SetUrlMapRegionTargetHttpProxyRequest, ): @@ -3717,6 +3989,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_url_map_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_url_map in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_url_map] = mock_rpc + + request = {} + client.set_url_map_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_url_map_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_url_map_unary_rest_required_fields( request_type=compute.SetUrlMapRegionTargetHttpProxyRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_https_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_https_proxies.py index 1cb5c6aa24e9..3925474d6269 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_https_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_https_proxies.py @@ -1097,6 +1097,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteRegionTargetHttpsProxyRequest, ): @@ -1417,6 +1457,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteRegionTargetHttpsProxyRequest, ): @@ -1749,6 +1829,42 @@ def test_get_rest(request_type): assert response.url_map == "url_map_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields( request_type=compute.GetRegionTargetHttpsProxyRequest, ): @@ -2179,6 +2295,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertRegionTargetHttpsProxyRequest, ): @@ -2580,6 +2736,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertRegionTargetHttpsProxyRequest, ): @@ -2875,6 +3071,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListRegionTargetHttpsProxiesRequest, ): @@ -3367,6 +3599,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields( request_type=compute.PatchRegionTargetHttpsProxyRequest, ): @@ -3787,6 +4059,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchRegionTargetHttpsProxyRequest, ): @@ -4222,6 +4534,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_ssl_certificates_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_ssl_certificates in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_ssl_certificates + ] = mock_rpc + + request = {} + client.set_ssl_certificates(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_ssl_certificates(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_ssl_certificates_rest_required_fields( request_type=compute.SetSslCertificatesRegionTargetHttpsProxyRequest, ): @@ -4636,6 +4992,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_ssl_certificates_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_ssl_certificates in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_ssl_certificates + ] = mock_rpc + + request = {} + client.set_ssl_certificates_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_ssl_certificates_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_ssl_certificates_unary_rest_required_fields( request_type=compute.SetSslCertificatesRegionTargetHttpsProxyRequest, ): @@ -5061,6 +5461,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_url_map_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_url_map in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_url_map] = mock_rpc + + request = {} + client.set_url_map(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_url_map(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_url_map_rest_required_fields( request_type=compute.SetUrlMapRegionTargetHttpsProxyRequest, ): @@ -5459,6 +5899,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_url_map_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_url_map in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_url_map] = mock_rpc + + request = {} + client.set_url_map_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_url_map_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_url_map_unary_rest_required_fields( request_type=compute.SetUrlMapRegionTargetHttpsProxyRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_tcp_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_tcp_proxies.py index 457ca292a006..9d1c5219c87a 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_tcp_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_target_tcp_proxies.py @@ -1093,6 +1093,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteRegionTargetTcpProxyRequest, ): @@ -1413,6 +1453,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteRegionTargetTcpProxyRequest, ): @@ -1731,6 +1811,42 @@ def test_get_rest(request_type): assert response.service == "service_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetRegionTargetTcpProxyRequest): transport_class = transports.RegionTargetTcpProxiesRestTransport @@ -2152,6 +2268,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertRegionTargetTcpProxyRequest, ): @@ -2546,6 +2702,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertRegionTargetTcpProxyRequest, ): @@ -2841,6 +3037,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionTargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields( request_type=compute.ListRegionTargetTcpProxiesRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_url_maps.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_url_maps.py index 49e7933e7926..5261d47b8e94 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_url_maps.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_url_maps.py @@ -1035,6 +1035,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteRegionUrlMapRequest): transport_class = transports.RegionUrlMapsRestTransport @@ -1345,6 +1385,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteRegionUrlMapRequest, ): @@ -1653,6 +1733,42 @@ def test_get_rest(request_type): assert response.self_link == "self_link_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetRegionUrlMapRequest): transport_class = transports.RegionUrlMapsRestTransport @@ -2219,6 +2335,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertRegionUrlMapRequest): transport_class = transports.RegionUrlMapsRestTransport @@ -2764,6 +2920,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertRegionUrlMapRequest, ): @@ -3059,6 +3255,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListRegionUrlMapsRequest): transport_class = transports.RegionUrlMapsRestTransport @@ -3689,6 +3921,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchRegionUrlMapRequest): transport_class = transports.RegionUrlMapsRestTransport @@ -4245,6 +4517,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchRegionUrlMapRequest, ): @@ -4825,6 +5137,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_update_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_rest_required_fields(request_type=compute.UpdateRegionUrlMapRequest): transport_class = transports.RegionUrlMapsRestTransport @@ -5381,6 +5733,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_update_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_unary_rest_required_fields( request_type=compute.UpdateRegionUrlMapRequest, ): @@ -5934,6 +6326,42 @@ def get_message_fields(field): assert isinstance(response, compute.UrlMapsValidateResponse) +def test_validate_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.validate in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.validate] = mock_rpc + + request = {} + client.validate(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.validate(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_validate_rest_required_fields( request_type=compute.ValidateRegionUrlMapRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_zones.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_zones.py index 98704e5dd699..42a7fbdfe248 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_zones.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_zones.py @@ -975,6 +975,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionZonesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListRegionZonesRequest): transport_class = transports.RegionZonesRestTransport diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_regions.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_regions.py index 131a04651db9..b2917db84f8c 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_regions.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_regions.py @@ -961,6 +961,42 @@ def test_get_rest(request_type): assert response.zones == ["zones_value"] +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetRegionRequest): transport_class = transports.RegionsRestTransport @@ -1240,6 +1276,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListRegionsRequest): transport_class = transports.RegionsRestTransport diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_reservations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_reservations.py index 0c2ba2ed90dc..c00af7577ff2 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_reservations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_reservations.py @@ -988,6 +988,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListReservationsRequest, ): @@ -1389,6 +1425,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteReservationRequest): transport_class = transports.ReservationsRestTransport @@ -1699,6 +1775,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteReservationRequest, ): @@ -2011,6 +2127,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetReservationRequest): transport_class = transports.ReservationsRestTransport @@ -2301,6 +2453,42 @@ def test_get_iam_policy_rest(request_type): assert response.version == 774 +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=compute.GetIamPolicyReservationRequest, ): @@ -2755,6 +2943,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertReservationRequest): transport_class = transports.ReservationsRestTransport @@ -3198,6 +3426,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertReservationRequest, ): @@ -3509,6 +3777,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListReservationsRequest): transport_class = transports.ReservationsRestTransport @@ -3981,6 +4285,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_resize_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.resize in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.resize] = mock_rpc + + request = {} + client.resize(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.resize(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_resize_rest_required_fields(request_type=compute.ResizeReservationRequest): transport_class = transports.ReservationsRestTransport @@ -4377,6 +4721,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_resize_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.resize in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.resize] = mock_rpc + + request = {} + client.resize_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.resize_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_resize_unary_rest_required_fields( request_type=compute.ResizeReservationRequest, ): @@ -4833,6 +5217,42 @@ def get_message_fields(field): assert response.version == 774 +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=compute.SetIamPolicyReservationRequest, ): @@ -5209,6 +5629,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsReservationRequest, ): @@ -5671,6 +6131,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_update_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_rest_required_fields(request_type=compute.UpdateReservationRequest): transport_class = transports.ReservationsRestTransport @@ -6137,6 +6637,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_update_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_unary_rest_required_fields( request_type=compute.UpdateReservationRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_resource_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_resource_policies.py index 2be17d6a6cdd..d5e92cf82f55 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_resource_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_resource_policies.py @@ -1027,6 +1027,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListResourcePoliciesRequest, ): @@ -1434,6 +1470,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteResourcePolicyRequest): transport_class = transports.ResourcePoliciesRestTransport @@ -1752,6 +1828,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteResourcePolicyRequest, ): @@ -2066,6 +2182,42 @@ def test_get_rest(request_type): assert response.status == "status_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetResourcePolicyRequest): transport_class = transports.ResourcePoliciesRestTransport @@ -2364,6 +2516,42 @@ def test_get_iam_policy_rest(request_type): assert response.version == 774 +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=compute.GetIamPolicyResourcePolicyRequest, ): @@ -2834,6 +3022,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertResourcePolicyRequest): transport_class = transports.ResourcePoliciesRestTransport @@ -3277,6 +3505,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertResourcePolicyRequest, ): @@ -3574,6 +3842,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListResourcePoliciesRequest): transport_class = transports.ResourcePoliciesRestTransport @@ -4108,6 +4412,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchResourcePolicyRequest): transport_class = transports.ResourcePoliciesRestTransport @@ -4580,6 +4924,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchResourcePolicyRequest, ): @@ -5050,6 +5434,42 @@ def get_message_fields(field): assert response.version == 774 +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=compute.SetIamPolicyResourcePolicyRequest, ): @@ -5426,6 +5846,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsResourcePolicyRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routers.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routers.py index 93000fc4e3c9..ee3a34efa98c 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routers.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routers.py @@ -960,6 +960,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListRoutersRequest, ): @@ -1356,6 +1392,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteRouterRequest): transport_class = transports.RoutersRestTransport @@ -1662,6 +1738,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields(request_type=compute.DeleteRouterRequest): transport_class = transports.RoutersRestTransport @@ -1964,6 +2080,42 @@ def test_get_rest(request_type): assert response.self_link == "self_link_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetRouterRequest): transport_class = transports.RoutersRestTransport @@ -2245,6 +2397,42 @@ def test_get_nat_ip_info_rest(request_type): assert isinstance(response, compute.NatIpInfoResponse) +def test_get_nat_ip_info_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_nat_ip_info in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_nat_ip_info] = mock_rpc + + request = {} + client.get_nat_ip_info(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_nat_ip_info(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_nat_ip_info_rest_required_fields( request_type=compute.GetNatIpInfoRouterRequest, ): @@ -2543,6 +2731,46 @@ def test_get_nat_mapping_info_rest(request_type): assert response.self_link == "self_link_value" +def test_get_nat_mapping_info_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_nat_mapping_info in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_nat_mapping_info + ] = mock_rpc + + request = {} + client.get_nat_mapping_info(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_nat_mapping_info(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_nat_mapping_info_rest_required_fields( request_type=compute.GetNatMappingInfoRoutersRequest, ): @@ -2912,6 +3140,44 @@ def test_get_router_status_rest(request_type): assert response.kind == "kind_value" +def test_get_router_status_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_router_status in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_router_status + ] = mock_rpc + + request = {} + client.get_router_status(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_router_status(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_router_status_rest_required_fields( request_type=compute.GetRouterStatusRouterRequest, ): @@ -3188,6 +3454,7 @@ def test_insert_rest(request_type): {"description": "description_value", "range_": "range__value"} ], "asn": 322, + "identifier_range": "identifier_range_value", "keepalive_interval": 1914, }, "bgp_peers": [ @@ -3208,15 +3475,20 @@ def test_insert_rest(request_type): "custom_learned_ip_ranges": [{"range_": "range__value"}], "custom_learned_route_priority": 3140, "enable": "enable_value", + "enable_ipv4": True, "enable_ipv6": True, + "export_policies": ["export_policies_value1", "export_policies_value2"], + "import_policies": ["import_policies_value1", "import_policies_value2"], "interface_name": "interface_name_value", "ip_address": "ip_address_value", + "ipv4_nexthop_address": "ipv4_nexthop_address_value", "ipv6_nexthop_address": "ipv6_nexthop_address_value", "management_type": "management_type_value", "md5_authentication_key_name": "md5_authentication_key_name_value", "name": "name_value", "peer_asn": 845, "peer_ip_address": "peer_ip_address_value", + "peer_ipv4_nexthop_address": "peer_ipv4_nexthop_address_value", "peer_ipv6_nexthop_address": "peer_ipv6_nexthop_address_value", "router_appliance_instance": "router_appliance_instance_value", } @@ -3228,6 +3500,7 @@ def test_insert_rest(request_type): "interfaces": [ { "ip_range": "ip_range_value", + "ip_version": "ip_version_value", "linked_interconnect_attachment": "linked_interconnect_attachment_value", "linked_vpn_tunnel": "linked_vpn_tunnel_value", "management_type": "management_type_value", @@ -3438,6 +3711,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to 
create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertRouterRequest): transport_class = transports.RoutersRestTransport @@ -3707,6 +4020,7 @@ def test_insert_unary_rest(request_type): {"description": "description_value", "range_": "range__value"} ], "asn": 322, + "identifier_range": "identifier_range_value", "keepalive_interval": 1914, }, "bgp_peers": [ @@ -3727,15 +4041,20 @@ def test_insert_unary_rest(request_type): "custom_learned_ip_ranges": [{"range_": "range__value"}], "custom_learned_route_priority": 3140, "enable": "enable_value", + "enable_ipv4": True, "enable_ipv6": True, + "export_policies": ["export_policies_value1", "export_policies_value2"], + "import_policies": ["import_policies_value1", "import_policies_value2"], "interface_name": "interface_name_value", "ip_address": 
"ip_address_value", + "ipv4_nexthop_address": "ipv4_nexthop_address_value", "ipv6_nexthop_address": "ipv6_nexthop_address_value", "management_type": "management_type_value", "md5_authentication_key_name": "md5_authentication_key_name_value", "name": "name_value", "peer_asn": 845, "peer_ip_address": "peer_ip_address_value", + "peer_ipv4_nexthop_address": "peer_ipv4_nexthop_address_value", "peer_ipv6_nexthop_address": "peer_ipv6_nexthop_address_value", "router_appliance_instance": "router_appliance_instance_value", } @@ -3747,6 +4066,7 @@ def test_insert_unary_rest(request_type): "interfaces": [ { "ip_range": "ip_range_value", + "ip_version": "ip_version_value", "linked_interconnect_attachment": "linked_interconnect_attachment_value", "linked_vpn_tunnel": "linked_vpn_tunnel_value", "management_type": "management_type_value", @@ -3935,6 +4255,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields(request_type=compute.InsertRouterRequest): transport_class = transports.RoutersRestTransport @@ -4224,6 +4584,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListRoutersRequest): transport_class = transports.RoutersRestTransport @@ -4556,6 +4952,7 @@ def test_patch_rest(request_type): {"description": "description_value", "range_": "range__value"} ], "asn": 322, + "identifier_range": "identifier_range_value", "keepalive_interval": 1914, }, "bgp_peers": [ @@ -4576,15 +4973,20 @@ def test_patch_rest(request_type): "custom_learned_ip_ranges": [{"range_": "range__value"}], "custom_learned_route_priority": 3140, "enable": "enable_value", + "enable_ipv4": True, "enable_ipv6": True, + "export_policies": ["export_policies_value1", "export_policies_value2"], + "import_policies": ["import_policies_value1", "import_policies_value2"], "interface_name": "interface_name_value", "ip_address": "ip_address_value", + "ipv4_nexthop_address": "ipv4_nexthop_address_value", "ipv6_nexthop_address": "ipv6_nexthop_address_value", "management_type": "management_type_value", "md5_authentication_key_name": "md5_authentication_key_name_value", "name": "name_value", "peer_asn": 845, "peer_ip_address": "peer_ip_address_value", + "peer_ipv4_nexthop_address": "peer_ipv4_nexthop_address_value", "peer_ipv6_nexthop_address": "peer_ipv6_nexthop_address_value", "router_appliance_instance": "router_appliance_instance_value", } @@ -4596,6 +4998,7 @@ def test_patch_rest(request_type): "interfaces": [ { "ip_range": "ip_range_value", + "ip_version": "ip_version_value", "linked_interconnect_attachment": "linked_interconnect_attachment_value", "linked_vpn_tunnel": "linked_vpn_tunnel_value", "management_type": "management_type_value", @@ -4806,6 +5209,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages 
to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchRouterRequest): transport_class = transports.RoutersRestTransport @@ -5086,6 +5529,7 @@ def test_patch_unary_rest(request_type): {"description": "description_value", "range_": "range__value"} ], "asn": 322, + "identifier_range": "identifier_range_value", "keepalive_interval": 1914, }, "bgp_peers": [ @@ -5106,15 +5550,20 @@ def test_patch_unary_rest(request_type): "custom_learned_ip_ranges": [{"range_": "range__value"}], "custom_learned_route_priority": 3140, "enable": "enable_value", + "enable_ipv4": True, "enable_ipv6": True, + "export_policies": ["export_policies_value1", "export_policies_value2"], + "import_policies": ["import_policies_value1", "import_policies_value2"], "interface_name": "interface_name_value", "ip_address": 
"ip_address_value", + "ipv4_nexthop_address": "ipv4_nexthop_address_value", "ipv6_nexthop_address": "ipv6_nexthop_address_value", "management_type": "management_type_value", "md5_authentication_key_name": "md5_authentication_key_name_value", "name": "name_value", "peer_asn": 845, "peer_ip_address": "peer_ip_address_value", + "peer_ipv4_nexthop_address": "peer_ipv4_nexthop_address_value", "peer_ipv6_nexthop_address": "peer_ipv6_nexthop_address_value", "router_appliance_instance": "router_appliance_instance_value", } @@ -5126,6 +5575,7 @@ def test_patch_unary_rest(request_type): "interfaces": [ { "ip_range": "ip_range_value", + "ip_version": "ip_version_value", "linked_interconnect_attachment": "linked_interconnect_attachment_value", "linked_vpn_tunnel": "linked_vpn_tunnel_value", "management_type": "management_type_value", @@ -5314,6 +5764,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields(request_type=compute.PatchRouterRequest): transport_class = transports.RoutersRestTransport @@ -5594,6 +6084,7 @@ def test_preview_rest(request_type): {"description": "description_value", "range_": "range__value"} ], "asn": 322, + "identifier_range": "identifier_range_value", "keepalive_interval": 1914, }, "bgp_peers": [ @@ -5614,15 +6105,20 @@ def test_preview_rest(request_type): "custom_learned_ip_ranges": [{"range_": "range__value"}], "custom_learned_route_priority": 3140, "enable": "enable_value", + "enable_ipv4": True, "enable_ipv6": True, + "export_policies": ["export_policies_value1", "export_policies_value2"], + "import_policies": ["import_policies_value1", "import_policies_value2"], "interface_name": "interface_name_value", "ip_address": "ip_address_value", + "ipv4_nexthop_address": "ipv4_nexthop_address_value", "ipv6_nexthop_address": "ipv6_nexthop_address_value", "management_type": "management_type_value", "md5_authentication_key_name": "md5_authentication_key_name_value", "name": "name_value", "peer_asn": 845, "peer_ip_address": "peer_ip_address_value", + "peer_ipv4_nexthop_address": "peer_ipv4_nexthop_address_value", "peer_ipv6_nexthop_address": "peer_ipv6_nexthop_address_value", "router_appliance_instance": "router_appliance_instance_value", } @@ -5634,6 +6130,7 @@ def test_preview_rest(request_type): "interfaces": [ { "ip_range": "ip_range_value", + "ip_version": "ip_version_value", "linked_interconnect_attachment": "linked_interconnect_attachment_value", "linked_vpn_tunnel": "linked_vpn_tunnel_value", "management_type": "management_type_value", @@ -5799,6 +6296,42 @@ 
def get_message_fields(field): assert isinstance(response, compute.RoutersPreviewResponse) +def test_preview_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.preview in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.preview] = mock_rpc + + request = {} + client.preview(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.preview(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_preview_rest_required_fields(request_type=compute.PreviewRouterRequest): transport_class = transports.RoutersRestTransport @@ -6079,6 +6612,7 @@ def test_update_rest(request_type): {"description": "description_value", "range_": "range__value"} ], "asn": 322, + "identifier_range": "identifier_range_value", "keepalive_interval": 1914, }, "bgp_peers": [ @@ -6099,15 +6633,20 @@ def test_update_rest(request_type): "custom_learned_ip_ranges": [{"range_": "range__value"}], "custom_learned_route_priority": 3140, "enable": "enable_value", + "enable_ipv4": True, "enable_ipv6": True, + "export_policies": ["export_policies_value1", "export_policies_value2"], + "import_policies": ["import_policies_value1", "import_policies_value2"], "interface_name": "interface_name_value", "ip_address": "ip_address_value", + "ipv4_nexthop_address": "ipv4_nexthop_address_value", "ipv6_nexthop_address": "ipv6_nexthop_address_value", "management_type": "management_type_value", "md5_authentication_key_name": "md5_authentication_key_name_value", "name": "name_value", "peer_asn": 845, "peer_ip_address": "peer_ip_address_value", + "peer_ipv4_nexthop_address": "peer_ipv4_nexthop_address_value", "peer_ipv6_nexthop_address": "peer_ipv6_nexthop_address_value", "router_appliance_instance": "router_appliance_instance_value", } @@ -6119,6 +6658,7 @@ def test_update_rest(request_type): "interfaces": [ { "ip_range": "ip_range_value", + "ip_version": "ip_version_value", "linked_interconnect_attachment": "linked_interconnect_attachment_value", "linked_vpn_tunnel": "linked_vpn_tunnel_value", "management_type": "management_type_value", @@ -6329,6 +6869,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_update_rest_use_cached_wrapped_rpc(): + # Clients should use 
_prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_rest_required_fields(request_type=compute.UpdateRouterRequest): transport_class = transports.RoutersRestTransport @@ -6609,6 +7189,7 @@ def test_update_unary_rest(request_type): {"description": "description_value", "range_": "range__value"} ], "asn": 322, + "identifier_range": "identifier_range_value", "keepalive_interval": 1914, }, "bgp_peers": [ @@ -6629,15 +7210,20 @@ def test_update_unary_rest(request_type): "custom_learned_ip_ranges": [{"range_": "range__value"}], "custom_learned_route_priority": 3140, "enable": "enable_value", + "enable_ipv4": True, "enable_ipv6": True, + "export_policies": ["export_policies_value1", "export_policies_value2"], + "import_policies": ["import_policies_value1", "import_policies_value2"], "interface_name": "interface_name_value", 
"ip_address": "ip_address_value", + "ipv4_nexthop_address": "ipv4_nexthop_address_value", "ipv6_nexthop_address": "ipv6_nexthop_address_value", "management_type": "management_type_value", "md5_authentication_key_name": "md5_authentication_key_name_value", "name": "name_value", "peer_asn": 845, "peer_ip_address": "peer_ip_address_value", + "peer_ipv4_nexthop_address": "peer_ipv4_nexthop_address_value", "peer_ipv6_nexthop_address": "peer_ipv6_nexthop_address_value", "router_appliance_instance": "router_appliance_instance_value", } @@ -6649,6 +7235,7 @@ def test_update_unary_rest(request_type): "interfaces": [ { "ip_range": "ip_range_value", + "ip_version": "ip_version_value", "linked_interconnect_attachment": "linked_interconnect_attachment_value", "linked_vpn_tunnel": "linked_vpn_tunnel_value", "management_type": "management_type_value", @@ -6837,6 +7424,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_update_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_unary_rest_required_fields(request_type=compute.UpdateRouterRequest): transport_class = transports.RoutersRestTransport diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routes.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routes.py index 6cca722165bc..896569ce8a2a 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routes.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_routes.py @@ -993,6 +993,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteRouteRequest): transport_class = transports.RoutesRestTransport @@ -1288,6 +1328,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields(request_type=compute.DeleteRouteRequest): transport_class = transports.RoutesRestTransport @@ -1601,6 +1681,42 @@ def test_get_rest(request_type): assert response.tags == ["tags_value"] +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetRouteRequest): transport_class = transports.RoutesRestTransport @@ -2015,6 +2131,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertRouteRequest): transport_class = transports.RoutesRestTransport @@ -2409,6 +2565,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields(request_type=compute.InsertRouteRequest): transport_class = transports.RoutesRestTransport @@ -2690,6 +2886,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListRoutesRequest): transport_class = transports.RoutesRestTransport diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_security_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_security_policies.py index 84fe7186ca38..ceb29fbb300c 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_security_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_security_policies.py @@ -1222,6 +1222,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_add_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_rule] = mock_rpc + + request = {} + client.add_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_rule_rest_required_fields( request_type=compute.AddRuleSecurityPolicyRequest, ): @@ -1694,6 +1734,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_add_rule_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_rule] = mock_rpc + + request = {} + client.add_rule_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_rule_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_rule_unary_rest_required_fields( request_type=compute.AddRuleSecurityPolicyRequest, ): @@ -1993,6 +2073,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListSecurityPoliciesRequest, ): @@ -2396,6 +2512,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteSecurityPolicyRequest): transport_class = transports.SecurityPoliciesRestTransport @@ -2695,6 +2851,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteSecurityPolicyRequest, ): @@ -2994,6 +3190,42 @@ def test_get_rest(request_type): assert response.type_ == "type__value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetSecurityPolicyRequest): transport_class = transports.SecurityPoliciesRestTransport @@ -3281,6 +3513,42 @@ def test_get_rule_rest(request_type): assert response.priority == 898 +def test_get_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_rule] = mock_rpc + + request = {} + client.get_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rule_rest_required_fields( request_type=compute.GetRuleSecurityPolicyRequest, ): @@ -3829,6 +4097,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertSecurityPolicyRequest): transport_class = transports.SecurityPoliciesRestTransport @@ -4370,6 +4678,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertSecurityPolicyRequest, ): @@ -4674,6 +5022,42 @@ def test_list_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListSecurityPoliciesRequest): transport_class = transports.SecurityPoliciesRestTransport @@ -5018,6 +5402,47 @@ def test_list_preconfigured_expression_sets_rest(request_type): ) +def test_list_preconfigured_expression_sets_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_preconfigured_expression_sets + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_preconfigured_expression_sets + ] = mock_rpc + + request = {} + client.list_preconfigured_expression_sets(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_preconfigured_expression_sets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_preconfigured_expression_sets_rest_required_fields( request_type=compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest, ): @@ -5590,6 +6015,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchSecurityPolicyRequest): transport_class = transports.SecurityPoliciesRestTransport @@ -6138,6 +6603,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchSecurityPolicyRequest, ): @@ -6650,6 +7155,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch_rule] = mock_rpc + + request = {} + client.patch_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rule_rest_required_fields( request_type=compute.PatchRuleSecurityPolicyRequest, ): @@ -7134,6 +7679,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_rule_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch_rule] = mock_rpc + + request = {} + client.patch_rule_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_rule_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rule_unary_rest_required_fields( request_type=compute.PatchRuleSecurityPolicyRequest, ): @@ -7477,6 +8062,46 @@ def test_remove_rule_rest(request_type): assert response.zone == "zone_value" +def test_remove_rule_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.remove_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.remove_rule] = mock_rpc + + request = {} + client.remove_rule(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_rule(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_rule_rest_required_fields( request_type=compute.RemoveRuleSecurityPolicyRequest, ): @@ -7778,6 +8403,46 @@ def test_remove_rule_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_remove_rule_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.remove_rule in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.remove_rule] = mock_rpc + + request = {} + client.remove_rule_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_rule_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_rule_unary_rest_required_fields( request_type=compute.RemoveRuleSecurityPolicyRequest, ): @@ -8180,6 +8845,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_labels_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_rest_required_fields( request_type=compute.SetLabelsSecurityPolicyRequest, ): @@ -8566,6 +9271,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_labels_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_unary_rest_required_fields( request_type=compute.SetLabelsSecurityPolicyRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_service_attachments.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_service_attachments.py index aeae8b61af77..6f9a425ccd90 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_service_attachments.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_service_attachments.py @@ -1037,6 +1037,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListServiceAttachmentsRequest, ): @@ -1445,6 +1481,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteServiceAttachmentRequest, ): @@ -1765,6 +1841,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteServiceAttachmentRequest, ): @@ -2095,6 +2211,42 @@ def test_get_rest(request_type): assert response.target_service == "target_service_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetServiceAttachmentRequest): transport_class = transports.ServiceAttachmentsRestTransport @@ -2393,6 +2545,42 @@ def test_get_iam_policy_rest(request_type): assert response.version == 774 +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=compute.GetIamPolicyServiceAttachmentRequest, ): @@ -2837,6 +3025,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertServiceAttachmentRequest, ): @@ -3264,6 +3492,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertServiceAttachmentRequest, ): @@ -3567,6 +3835,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListServiceAttachmentsRequest): transport_class = transports.ServiceAttachmentsRestTransport @@ -4075,6 +4379,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchServiceAttachmentRequest): transport_class = transports.ServiceAttachmentsRestTransport @@ -4519,6 +4863,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchServiceAttachmentRequest, ): @@ -4987,6 +5371,42 @@ def get_message_fields(field): assert response.version == 774 +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=compute.SetIamPolicyServiceAttachmentRequest, ): @@ -5363,6 +5783,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsServiceAttachmentRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshot_settings_service.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshot_settings_service.py index 826a145889a5..86598706b1df 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshot_settings_service.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshot_settings_service.py @@ -1043,6 +1043,42 @@ def test_get_rest(request_type): assert isinstance(response, compute.SnapshotSettings) +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetSnapshotSettingRequest): transport_class = transports.SnapshotSettingsServiceRestTransport @@ -1426,6 +1462,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchSnapshotSettingRequest): transport_class = transports.SnapshotSettingsServiceRestTransport @@ -1824,6 +1900,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SnapshotSettingsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchSnapshotSettingRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshots.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshots.py index b419c7b4db33..5af4297141e7 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshots.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshots.py @@ -1010,6 +1010,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteSnapshotRequest): transport_class = transports.SnapshotsRestTransport @@ -1305,6 +1345,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields(request_type=compute.DeleteSnapshotRequest): transport_class = transports.SnapshotsRestTransport @@ -1649,6 +1729,42 @@ def test_get_rest(request_type): assert response.storage_locations == ["storage_locations_value"] +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetSnapshotRequest): transport_class = transports.SnapshotsRestTransport @@ -1926,6 +2042,42 @@ def test_get_iam_policy_rest(request_type): assert response.version == 774 +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=compute.GetIamPolicySnapshotRequest, ): @@ -2358,6 +2510,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertSnapshotRequest): transport_class = transports.SnapshotsRestTransport @@ -2761,6 +2953,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields(request_type=compute.InsertSnapshotRequest): transport_class = transports.SnapshotsRestTransport @@ -3039,6 +3271,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListSnapshotsRequest): transport_class = transports.SnapshotsRestTransport @@ -3530,6 +3798,42 @@ def get_message_fields(field): assert response.version == 774 +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=compute.SetIamPolicySnapshotRequest, ): @@ -3936,6 +4240,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_labels_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_rest_required_fields(request_type=compute.SetLabelsSnapshotRequest): transport_class = transports.SnapshotsRestTransport @@ -4318,6 +4662,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_labels_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_unary_rest_required_fields( request_type=compute.SetLabelsSnapshotRequest, ): @@ -4681,6 +5065,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsSnapshotRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_certificates.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_certificates.py index 1f15bc7f75c9..de0d73c21d56 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_certificates.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_certificates.py @@ -1019,6 +1019,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListSslCertificatesRequest, ): @@ -1422,6 +1458,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteSslCertificateRequest): transport_class = transports.SslCertificatesRestTransport @@ -1721,6 +1797,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteSslCertificateRequest, ): @@ -2024,6 +2140,42 @@ def test_get_rest(request_type): assert response.type_ == "type__value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetSslCertificateRequest): transport_class = transports.SslCertificatesRestTransport @@ -2442,6 +2594,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertSslCertificateRequest): transport_class = transports.SslCertificatesRestTransport @@ -2839,6 +3031,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertSslCertificateRequest, ): @@ -3127,6 +3359,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListSslCertificatesRequest): transport_class = transports.SslCertificatesRestTransport diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_policies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_policies.py index b152823463cc..4b69a88b1780 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_policies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_ssl_policies.py @@ -986,6 +986,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListSslPoliciesRequest, ): @@ -1387,6 +1423,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteSslPolicyRequest): transport_class = transports.SslPoliciesRestTransport @@ -1684,6 +1760,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields(request_type=compute.DeleteSslPolicyRequest): transport_class = transports.SslPoliciesRestTransport @@ -1983,6 +2099,42 @@ def test_get_rest(request_type): assert response.self_link == "self_link_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetSslPolicyRequest): transport_class = transports.SslPoliciesRestTransport @@ -2388,6 +2540,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertSslPolicyRequest): transport_class = transports.SslPoliciesRestTransport @@ -2774,6 +2966,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields(request_type=compute.InsertSslPolicyRequest): transport_class = transports.SslPoliciesRestTransport @@ -3058,6 +3290,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListSslPoliciesRequest): transport_class = transports.SslPoliciesRestTransport @@ -3397,6 +3665,47 @@ def test_list_available_features_rest(request_type): assert response.features == ["features_value"] +def test_list_available_features_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_available_features + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_available_features + ] = mock_rpc + + request = {} + client.list_available_features(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_available_features(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_available_features_rest_required_fields( request_type=compute.ListAvailableFeaturesSslPoliciesRequest, ): @@ -3820,6 +4129,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchSslPolicyRequest): transport_class = transports.SslPoliciesRestTransport @@ -4213,6 +4562,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields(request_type=compute.PatchSslPolicyRequest): transport_class = transports.SslPoliciesRestTransport diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_storage_pool_types.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_storage_pool_types.py new file mode 100644 index 000000000000..00f41515ffee --- /dev/null +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_storage_pool_types.py @@ -0,0 +1,2585 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.compute_v1.services.storage_pool_types import ( + StoragePoolTypesClient, + pagers, + transports, +) +from google.cloud.compute_v1.types import compute + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert StoragePoolTypesClient._get_default_mtls_endpoint(None) is None + assert ( + StoragePoolTypesClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + StoragePoolTypesClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + StoragePoolTypesClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + StoragePoolTypesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + StoragePoolTypesClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert StoragePoolTypesClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert StoragePoolTypesClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert StoragePoolTypesClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + StoragePoolTypesClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": 
"never"}): + assert StoragePoolTypesClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert StoragePoolTypesClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert StoragePoolTypesClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + StoragePoolTypesClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert StoragePoolTypesClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert StoragePoolTypesClient._get_client_cert_source(None, False) is None + assert ( + StoragePoolTypesClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + StoragePoolTypesClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + StoragePoolTypesClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + StoragePoolTypesClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + StoragePoolTypesClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + 
modify_default_endpoint_template(StoragePoolTypesClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = StoragePoolTypesClient._DEFAULT_UNIVERSE + default_endpoint = StoragePoolTypesClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = StoragePoolTypesClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + StoragePoolTypesClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + StoragePoolTypesClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == StoragePoolTypesClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + StoragePoolTypesClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + StoragePoolTypesClient._get_api_endpoint(None, None, default_universe, "always") + == StoragePoolTypesClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + StoragePoolTypesClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == StoragePoolTypesClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + StoragePoolTypesClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + StoragePoolTypesClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + StoragePoolTypesClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + StoragePoolTypesClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + StoragePoolTypesClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + StoragePoolTypesClient._get_universe_domain(None, None) + == StoragePoolTypesClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + StoragePoolTypesClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (StoragePoolTypesClient, transports.StoragePoolTypesRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. 
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (StoragePoolTypesClient, "rest"), + ], +) +def test_storage_pool_types_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.StoragePoolTypesRestTransport, "rest"), + ], +) +def test_storage_pool_types_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = 
service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (StoragePoolTypesClient, "rest"), + ], +) +def test_storage_pool_types_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +def test_storage_pool_types_client_get_transport_class(): + transport = StoragePoolTypesClient.get_transport_class() + available_transports = [ + transports.StoragePoolTypesRestTransport, + ] + assert transport in available_transports + + transport = StoragePoolTypesClient.get_transport_class("rest") + assert transport == transports.StoragePoolTypesRestTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (StoragePoolTypesClient, transports.StoragePoolTypesRestTransport, "rest"), + ], +) +@mock.patch.object( + StoragePoolTypesClient, + 
"_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(StoragePoolTypesClient), +) +def test_storage_pool_types_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(StoragePoolTypesClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(StoragePoolTypesClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + StoragePoolTypesClient, + transports.StoragePoolTypesRestTransport, + "rest", + "true", + ), + ( + StoragePoolTypesClient, + transports.StoragePoolTypesRestTransport, + "rest", + "false", + ), + ], +) 
+@mock.patch.object( + StoragePoolTypesClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(StoragePoolTypesClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_storage_pool_types_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [StoragePoolTypesClient]) +@mock.patch.object( + StoragePoolTypesClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(StoragePoolTypesClient), +) +def test_storage_pool_types_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [StoragePoolTypesClient]) +@mock.patch.object( + StoragePoolTypesClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(StoragePoolTypesClient), +) +def test_storage_pool_types_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = StoragePoolTypesClient._DEFAULT_UNIVERSE + default_endpoint = StoragePoolTypesClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = StoragePoolTypesClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (StoragePoolTypesClient, transports.StoragePoolTypesRestTransport, "rest"), + ], +) +def test_storage_pool_types_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + StoragePoolTypesClient, + transports.StoragePoolTypesRestTransport, + "rest", + None, + ), + ], +) +def test_storage_pool_types_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.AggregatedListStoragePoolTypesRequest, + dict, + ], +) +def test_aggregated_list_rest(request_type): + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.StoragePoolTypeAggregatedList( + id="id_value", + kind="kind_value", + next_page_token="next_page_token_value", + self_link="self_link_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.StoragePoolTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.AggregatedListPager) + assert response.id == "id_value" + assert response.kind == "kind_value" + assert response.next_page_token == "next_page_token_value" + assert response.self_link == "self_link_value" + + +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_aggregated_list_rest_required_fields( + request_type=compute.AggregatedListStoragePoolTypesRequest, +): + transport_class = transports.StoragePoolTypesRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "include_all_scopes", + "max_results", + "order_by", + "page_token", + "return_partial_success", + "service_project_number", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.StoragePoolTypeAggregatedList() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.StoragePoolTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.StoragePoolTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "includeAllScopes", + "maxResults", + "orderBy", + "pageToken", + "returnPartialSuccess", + "serviceProjectNumber", + ) + ) + & set(("project",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.StoragePoolTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.StoragePoolTypesRestInterceptor(), + ) + client = StoragePoolTypesClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.StoragePoolTypesRestInterceptor, "post_aggregated_list" + ) as post, mock.patch.object( + transports.StoragePoolTypesRestInterceptor, "pre_aggregated_list" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListStoragePoolTypesRequest.pb( + compute.AggregatedListStoragePoolTypesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.StoragePoolTypeAggregatedList.to_json( + compute.StoragePoolTypeAggregatedList() + ) + + request = compute.AggregatedListStoragePoolTypesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.StoragePoolTypeAggregatedList() + + client.aggregated_list( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListStoragePoolTypesRequest +): + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.StoragePoolTypeAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.StoragePoolTypeAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/aggregated/storagePoolTypes" + % client.transport._host, + args[1], + ) + + +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListStoragePoolTypesRequest(), + project="project_value", + ) + + +def test_aggregated_list_rest_pager(transport: str = "rest"): + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.StoragePoolTypeAggregatedList( + items={ + "a": compute.StoragePoolTypesScopedList(), + "b": compute.StoragePoolTypesScopedList(), + "c": compute.StoragePoolTypesScopedList(), + }, + next_page_token="abc", + ), + compute.StoragePoolTypeAggregatedList( + items={}, + next_page_token="def", + ), + compute.StoragePoolTypeAggregatedList( + items={ + "g": compute.StoragePoolTypesScopedList(), + }, + next_page_token="ghi", + ), + compute.StoragePoolTypeAggregatedList( + items={ + "h": compute.StoragePoolTypesScopedList(), + "i": compute.StoragePoolTypesScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + compute.StoragePoolTypeAggregatedList.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "sample1"} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get("a"), compute.StoragePoolTypesScopedList) + assert pager.get("h") is None + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, tuple) for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == ( + str, + compute.StoragePoolTypesScopedList, + ) + + assert pager.get("a") is None + assert isinstance(pager.get("h"), compute.StoragePoolTypesScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + 
compute.GetStoragePoolTypeRequest, + dict, + ], +) +def test_get_rest(request_type): + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "storage_pool_type": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.StoragePoolType( + creation_timestamp="creation_timestamp_value", + description="description_value", + id=205, + kind="kind_value", + max_pool_provisioned_capacity_gb=3397, + max_pool_provisioned_iops=2698, + max_pool_provisioned_throughput=3369, + min_pool_provisioned_capacity_gb=3395, + min_pool_provisioned_iops=2696, + min_pool_provisioned_throughput=3367, + min_size_gb=1158, + name="name_value", + self_link="self_link_value", + self_link_with_id="self_link_with_id_value", + supported_disk_types=["supported_disk_types_value"], + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.StoragePoolType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.StoragePoolType) + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.id == 205 + assert response.kind == "kind_value" + assert response.max_pool_provisioned_capacity_gb == 3397 + assert response.max_pool_provisioned_iops == 2698 + assert response.max_pool_provisioned_throughput == 3369 + assert response.min_pool_provisioned_capacity_gb == 3395 + assert response.min_pool_provisioned_iops == 2696 + assert response.min_pool_provisioned_throughput == 3367 + assert response.min_size_gb == 1158 + assert response.name == "name_value" + assert response.self_link == "self_link_value" + assert response.self_link_with_id == "self_link_with_id_value" + assert response.supported_disk_types == ["supported_disk_types_value"] + assert response.zone == "zone_value" + + +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_rest_required_fields(request_type=compute.GetStoragePoolTypeRequest): + transport_class = transports.StoragePoolTypesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["storage_pool_type"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["storagePoolType"] = "storage_pool_type_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "storagePoolType" in jsonified_request + assert jsonified_request["storagePoolType"] == "storage_pool_type_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.StoragePoolType() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.StoragePoolType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.StoragePoolTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "project", + "storagePoolType", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.StoragePoolTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.StoragePoolTypesRestInterceptor(), + ) + client = StoragePoolTypesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, 
mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.StoragePoolTypesRestInterceptor, "post_get" + ) as post, mock.patch.object( + transports.StoragePoolTypesRestInterceptor, "pre_get" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetStoragePoolTypeRequest.pb( + compute.GetStoragePoolTypeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.StoragePoolType.to_json( + compute.StoragePoolType() + ) + + request = compute.GetStoragePoolTypeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.StoragePoolType() + + client.get( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetStoragePoolTypeRequest +): + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "storage_pool_type": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.StoragePoolType() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "storage_pool_type": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + storage_pool_type="storage_pool_type_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.StoragePoolType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/storagePoolTypes/{storage_pool_type}" + % client.transport._host, + args[1], + ) + + +def test_get_rest_flattened_error(transport: str = "rest"): + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get( + compute.GetStoragePoolTypeRequest(), + project="project_value", + zone="zone_value", + storage_pool_type="storage_pool_type_value", + ) + + +def test_get_rest_error(): + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.ListStoragePoolTypesRequest, + dict, + ], +) +def test_list_rest(request_type): + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.StoragePoolTypeList( + id="id_value", + kind="kind_value", + next_page_token="next_page_token_value", + self_link="self_link_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.StoragePoolTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.id == "id_value" + assert response.kind == "kind_value" + assert response.next_page_token == "next_page_token_value" + assert response.self_link == "self_link_value" + + +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_rest_required_fields(request_type=compute.ListStoragePoolTypesRequest): + transport_class = transports.StoragePoolTypesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "max_results", + "order_by", + "page_token", + "return_partial_success", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.StoragePoolTypeList() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.StoragePoolTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.StoragePoolTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "maxResults", + "orderBy", + "pageToken", + "returnPartialSuccess", + ) + ) + & set( + ( + "project", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.StoragePoolTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.StoragePoolTypesRestInterceptor(), + ) + client = StoragePoolTypesClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, 
"transcode" + ) as transcode, mock.patch.object( + transports.StoragePoolTypesRestInterceptor, "post_list" + ) as post, mock.patch.object( + transports.StoragePoolTypesRestInterceptor, "pre_list" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListStoragePoolTypesRequest.pb( + compute.ListStoragePoolTypesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.StoragePoolTypeList.to_json( + compute.StoragePoolTypeList() + ) + + request = compute.ListStoragePoolTypesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.StoragePoolTypeList() + + client.list( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListStoragePoolTypesRequest +): + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.StoragePoolTypeList() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.StoragePoolTypeList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/storagePoolTypes" + % client.transport._host, + args[1], + ) + + +def test_list_rest_flattened_error(transport: str = "rest"): + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list( + compute.ListStoragePoolTypesRequest(), + project="project_value", + zone="zone_value", + ) + + +def test_list_rest_pager(transport: str = "rest"): + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.StoragePoolTypeList( + items=[ + compute.StoragePoolType(), + compute.StoragePoolType(), + compute.StoragePoolType(), + ], + next_page_token="abc", + ), + compute.StoragePoolTypeList( + items=[], + next_page_token="def", + ), + compute.StoragePoolTypeList( + items=[ + compute.StoragePoolType(), + ], + next_page_token="ghi", + ), + compute.StoragePoolTypeList( + items=[ + compute.StoragePoolType(), + compute.StoragePoolType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.StoragePoolTypeList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "sample1", "zone": "sample2"} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.StoragePoolType) for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide 
credentials and a transport instance. + transport = transports.StoragePoolTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.StoragePoolTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = StoragePoolTypesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.StoragePoolTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = StoragePoolTypesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = StoragePoolTypesClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.StoragePoolTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = StoragePoolTypesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.StoragePoolTypesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = StoragePoolTypesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.StoragePoolTypesRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = StoragePoolTypesClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_storage_pool_types_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.StoragePoolTypesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_storage_pool_types_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.compute_v1.services.storage_pool_types.transports.StoragePoolTypesTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.StoragePoolTypesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "aggregated_list", + "get", + "list", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_storage_pool_types_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.compute_v1.services.storage_pool_types.transports.StoragePoolTypesTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.StoragePoolTypesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute.readonly", + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +def test_storage_pool_types_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.compute_v1.services.storage_pool_types.transports.StoragePoolTypesTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.StoragePoolTypesTransport() + adc.assert_called_once() + + +def test_storage_pool_types_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + StoragePoolTypesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute.readonly", + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id=None, + ) + + +def test_storage_pool_types_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.StoragePoolTypesRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_storage_pool_types_host_no_port(transport_name): + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_storage_pool_types_host_with_port(transport_name): + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def 
test_storage_pool_types_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = StoragePoolTypesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = StoragePoolTypesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = StoragePoolTypesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = StoragePoolTypesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = StoragePoolTypesClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = StoragePoolTypesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = StoragePoolTypesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = StoragePoolTypesClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = StoragePoolTypesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = StoragePoolTypesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = StoragePoolTypesClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = StoragePoolTypesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = StoragePoolTypesClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = StoragePoolTypesClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = StoragePoolTypesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = StoragePoolTypesClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = StoragePoolTypesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.StoragePoolTypesTransport, "_prep_wrapped_messages" + ) as prep: + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.StoragePoolTypesTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = StoragePoolTypesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = StoragePoolTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (StoragePoolTypesClient, transports.StoragePoolTypesRestTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_storage_pools.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_storage_pools.py new file mode 100644 index 000000000000..6c7c06b9e384 --- /dev/null +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_storage_pools.py @@ -0,0 +1,6838 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import Iterable +import json +import math + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + path_template, +) +from google.api_core import api_core_version, client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation # type: ignore +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +from google.cloud.compute_v1.services.storage_pools import ( + StoragePoolsClient, + pagers, + transports, +) +from google.cloud.compute_v1.types import compute + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert StoragePoolsClient._get_default_mtls_endpoint(None) is None + assert ( + StoragePoolsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + StoragePoolsClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + StoragePoolsClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + StoragePoolsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert StoragePoolsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +def test__read_environment_variables(): + assert StoragePoolsClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert StoragePoolsClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert StoragePoolsClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + StoragePoolsClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with 
mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert StoragePoolsClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert StoragePoolsClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert StoragePoolsClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + StoragePoolsClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert StoragePoolsClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert StoragePoolsClient._get_client_cert_source(None, False) is None + assert ( + StoragePoolsClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + StoragePoolsClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + StoragePoolsClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + StoragePoolsClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + StoragePoolsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + 
modify_default_endpoint_template(StoragePoolsClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = StoragePoolsClient._DEFAULT_UNIVERSE + default_endpoint = StoragePoolsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = StoragePoolsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + StoragePoolsClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + StoragePoolsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == StoragePoolsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + StoragePoolsClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + StoragePoolsClient._get_api_endpoint(None, None, default_universe, "always") + == StoragePoolsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + StoragePoolsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == StoragePoolsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + StoragePoolsClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + StoragePoolsClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + StoragePoolsClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + StoragePoolsClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + StoragePoolsClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + StoragePoolsClient._get_universe_domain(None, None) + == StoragePoolsClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + StoragePoolsClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (StoragePoolsClient, transports.StoragePoolsRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. 
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (StoragePoolsClient, "rest"), + ], +) +def test_storage_pools_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.StoragePoolsRestTransport, "rest"), + ], +) +def test_storage_pools_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, 
None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (StoragePoolsClient, "rest"), + ], +) +def test_storage_pools_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +def test_storage_pools_client_get_transport_class(): + transport = StoragePoolsClient.get_transport_class() + available_transports = [ + transports.StoragePoolsRestTransport, + ] + assert transport in available_transports + + transport = StoragePoolsClient.get_transport_class("rest") + assert transport == transports.StoragePoolsRestTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (StoragePoolsClient, transports.StoragePoolsRestTransport, "rest"), + ], +) +@mock.patch.object( + StoragePoolsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(StoragePoolsClient), +) +def 
test_storage_pools_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(StoragePoolsClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(StoragePoolsClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (StoragePoolsClient, transports.StoragePoolsRestTransport, "rest", "true"), + (StoragePoolsClient, transports.StoragePoolsRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + StoragePoolsClient, + 
"_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(StoragePoolsClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_storage_pools_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [StoragePoolsClient]) +@mock.patch.object( + StoragePoolsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(StoragePoolsClient) +) +def test_storage_pools_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [StoragePoolsClient]) +@mock.patch.object( + StoragePoolsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(StoragePoolsClient), +) +def test_storage_pools_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = StoragePoolsClient._DEFAULT_UNIVERSE + default_endpoint = StoragePoolsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = StoragePoolsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (StoragePoolsClient, transports.StoragePoolsRestTransport, "rest"), + ], +) +def test_storage_pools_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (StoragePoolsClient, transports.StoragePoolsRestTransport, "rest", None), + ], +) +def test_storage_pools_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.AggregatedListStoragePoolsRequest, + dict, + ], +) +def test_aggregated_list_rest(request_type): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.StoragePoolAggregatedList( + etag="etag_value", + id="id_value", + kind="kind_value", + next_page_token="next_page_token_value", + self_link="self_link_value", + unreachables=["unreachables_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.StoragePoolAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.aggregated_list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.AggregatedListPager) + assert response.etag == "etag_value" + assert response.id == "id_value" + assert response.kind == "kind_value" + assert response.next_page_token == "next_page_token_value" + assert response.self_link == "self_link_value" + assert response.unreachables == ["unreachables_value"] + + +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_aggregated_list_rest_required_fields( + request_type=compute.AggregatedListStoragePoolsRequest, +): + transport_class = transports.StoragePoolsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "include_all_scopes", + "max_results", + "order_by", + "page_token", + "return_partial_success", + "service_project_number", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.StoragePoolAggregatedList() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.StoragePoolAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.aggregated_list(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "includeAllScopes", + "maxResults", + "orderBy", + "pageToken", + "returnPartialSuccess", + "serviceProjectNumber", + ) + ) + & set(("project",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregated_list_rest_interceptors(null_interceptor): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.StoragePoolsRestInterceptor(), + ) + client = StoragePoolsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), 
"request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.StoragePoolsRestInterceptor, "post_aggregated_list" + ) as post, mock.patch.object( + transports.StoragePoolsRestInterceptor, "pre_aggregated_list" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.AggregatedListStoragePoolsRequest.pb( + compute.AggregatedListStoragePoolsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.StoragePoolAggregatedList.to_json( + compute.StoragePoolAggregatedList() + ) + + request = compute.AggregatedListStoragePoolsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.StoragePoolAggregatedList() + + client.aggregated_list( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListStoragePoolsRequest +): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + +def test_aggregated_list_rest_flattened(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.StoragePoolAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.StoragePoolAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/aggregated/storagePools" + % client.transport._host, + args[1], + ) + + +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListStoragePoolsRequest(), + project="project_value", + ) + + +def test_aggregated_list_rest_pager(transport: str = "rest"): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.StoragePoolAggregatedList( + items={ + "a": compute.StoragePoolsScopedList(), + "b": compute.StoragePoolsScopedList(), + "c": compute.StoragePoolsScopedList(), + }, + next_page_token="abc", + ), + compute.StoragePoolAggregatedList( + items={}, + next_page_token="def", + ), + compute.StoragePoolAggregatedList( + items={ + "g": compute.StoragePoolsScopedList(), + }, + next_page_token="ghi", + ), + compute.StoragePoolAggregatedList( + items={ + "h": compute.StoragePoolsScopedList(), + "i": compute.StoragePoolsScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.StoragePoolAggregatedList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "sample1"} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get("a"), compute.StoragePoolsScopedList) + assert pager.get("h") is None + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, tuple) for i in results) + for result in results: + assert isinstance(result, tuple) + 
assert tuple(type(t) for t in result) == ( + str, + compute.StoragePoolsScopedList, + ) + + assert pager.get("a") is None + assert isinstance(pager.get("h"), compute.StoragePoolsScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + compute.DeleteStoragePoolRequest, + dict, + ], +) +def test_delete_rest(request_type): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "storage_pool": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_rest_required_fields(request_type=compute.DeleteStoragePoolRequest): + transport_class = transports.StoragePoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["storage_pool"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["storagePool"] = "storage_pool_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "storagePool" in jsonified_request + assert jsonified_request["storagePool"] == "storage_pool_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "storagePool", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.StoragePoolsRestInterceptor(), + ) + client = StoragePoolsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.StoragePoolsRestInterceptor, "post_delete" + ) as post, mock.patch.object( + transports.StoragePoolsRestInterceptor, "pre_delete" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteStoragePoolRequest.pb( + compute.DeleteStoragePoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteStoragePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteStoragePoolRequest +): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "storage_pool": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + +def test_delete_rest_flattened(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "storage_pool": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + storage_pool="storage_pool_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/storagePools/{storage_pool}" + % client.transport._host, + args[1], + ) + + +def test_delete_rest_flattened_error(transport: str = "rest"): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteStoragePoolRequest(), + project="project_value", + zone="zone_value", + storage_pool="storage_pool_value", + ) + + +def test_delete_rest_error(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.DeleteStoragePoolRequest, + dict, + ], +) +def test_delete_unary_rest(request_type): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "storage_pool": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + 
response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_unary_rest_required_fields( + request_type=compute.DeleteStoragePoolRequest, +): + transport_class = transports.StoragePoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["storage_pool"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["storagePool"] = "storage_pool_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "storagePool" in jsonified_request + assert jsonified_request["storagePool"] == "storage_pool_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "storagePool", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_unary_rest_interceptors(null_interceptor): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.StoragePoolsRestInterceptor(), + ) + client = StoragePoolsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.StoragePoolsRestInterceptor, "post_delete" + ) as post, mock.patch.object( + transports.StoragePoolsRestInterceptor, "pre_delete" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.DeleteStoragePoolRequest.pb( + compute.DeleteStoragePoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.DeleteStoragePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.delete_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_unary_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteStoragePoolRequest +): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "storage_pool": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_unary(request) + + +def test_delete_unary_rest_flattened(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "storage_pool": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + storage_pool="storage_pool_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/storagePools/{storage_pool}" + % client.transport._host, + args[1], + ) + + +def test_delete_unary_rest_flattened_error(transport: str = "rest"): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteStoragePoolRequest(), + project="project_value", + zone="zone_value", + storage_pool="storage_pool_value", + ) + + +def test_delete_unary_rest_error(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.GetStoragePoolRequest, + dict, + ], +) +def test_get_rest(request_type): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "storage_pool": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.StoragePool( + capacity_provisioning_type="capacity_provisioning_type_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + id=205, + kind="kind_value", + label_fingerprint="label_fingerprint_value", + name="name_value", + performance_provisioning_type="performance_provisioning_type_value", + pool_provisioned_capacity_gb=2976, + pool_provisioned_iops=2277, + pool_provisioned_throughput=2948, + self_link="self_link_value", + self_link_with_id="self_link_with_id_value", + state="state_value", + storage_pool_type="storage_pool_type_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.StoragePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get(request) 
+ + # Establish that the response is the type that we expect. + assert isinstance(response, compute.StoragePool) + assert response.capacity_provisioning_type == "capacity_provisioning_type_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.id == 205 + assert response.kind == "kind_value" + assert response.label_fingerprint == "label_fingerprint_value" + assert response.name == "name_value" + assert ( + response.performance_provisioning_type == "performance_provisioning_type_value" + ) + assert response.pool_provisioned_capacity_gb == 2976 + assert response.pool_provisioned_iops == 2277 + assert response.pool_provisioned_throughput == 2948 + assert response.self_link == "self_link_value" + assert response.self_link_with_id == "self_link_with_id_value" + assert response.state == "state_value" + assert response.storage_pool_type == "storage_pool_type_value" + assert response.zone == "zone_value" + + +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_rest_required_fields(request_type=compute.GetStoragePoolRequest): + transport_class = transports.StoragePoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["storage_pool"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["storagePool"] = "storage_pool_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "storagePool" in jsonified_request + assert jsonified_request["storagePool"] == "storage_pool_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.StoragePool() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.StoragePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "project", + "storagePool", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.StoragePoolsRestInterceptor(), + ) + client = StoragePoolsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.StoragePoolsRestInterceptor, "post_get" + ) as 
post, mock.patch.object( + transports.StoragePoolsRestInterceptor, "pre_get" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetStoragePoolRequest.pb(compute.GetStoragePoolRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.StoragePool.to_json(compute.StoragePool()) + + request = compute.GetStoragePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.StoragePool() + + client.get( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetStoragePoolRequest +): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "storage_pool": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_flattened(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.StoragePool() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "storage_pool": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + storage_pool="storage_pool_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.StoragePool.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/storagePools/{storage_pool}" + % client.transport._host, + args[1], + ) + + +def test_get_rest_flattened_error(transport: str = "rest"): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetStoragePoolRequest(), + project="project_value", + zone="zone_value", + storage_pool="storage_pool_value", + ) + + +def test_get_rest_error(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.GetIamPolicyStoragePoolRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag="etag_value", + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == "etag_value" + assert response.iam_owned is True + assert response.version == 774 + + +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_iam_policy_rest_required_fields( + request_type=compute.GetIamPolicyStoragePoolRequest, +): + transport_class = transports.StoragePoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["resource"] = "resource_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("options_requested_policy_version",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_iam_policy(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("optionsRequestedPolicyVersion",)) + & set( + ( + "project", + "resource", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.StoragePoolsRestInterceptor(), + ) + client = StoragePoolsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.StoragePoolsRestInterceptor, "post_get_iam_policy" + ) as post, mock.patch.object( + transports.StoragePoolsRestInterceptor, "pre_get_iam_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.GetIamPolicyStoragePoolRequest.pb( + compute.GetIamPolicyStoragePoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.GetIamPolicyStoragePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.get_iam_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.GetIamPolicyStoragePoolRequest +): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +def test_get_iam_policy_rest_flattened(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + resource="resource_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/storagePools/{resource}/getIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + compute.GetIamPolicyStoragePoolRequest(), + project="project_value", + zone="zone_value", + resource="resource_value", + ) + + +def test_get_iam_policy_rest_error(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.InsertStoragePoolRequest, + dict, + ], +) +def test_insert_rest(request_type): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["storage_pool_resource"] = { + "capacity_provisioning_type": "capacity_provisioning_type_value", + "creation_timestamp": "creation_timestamp_value", + "description": "description_value", + "id": 205, + "kind": "kind_value", + "label_fingerprint": "label_fingerprint_value", + "labels": {}, + "name": "name_value", + "performance_provisioning_type": "performance_provisioning_type_value", + "pool_provisioned_capacity_gb": 2976, + "pool_provisioned_iops": 2277, + "pool_provisioned_throughput": 2948, + "resource_status": { + "disk_count": 1075, + "last_resize_timestamp": "last_resize_timestamp_value", + "max_total_provisioned_disk_capacity_gb": 4025, + "pool_used_capacity_bytes": 2557, + "pool_used_iops": 1508, + "pool_used_throughput": 2179, + "pool_user_written_bytes": 2506, + "total_provisioned_disk_capacity_gb": 3604, + "total_provisioned_disk_iops": 2905, + "total_provisioned_disk_throughput": 3576, + }, + "self_link": "self_link_value", + "self_link_with_id": "self_link_with_id_value", + "state": "state_value", + "status": {}, + "storage_pool_type": "storage_pool_type_value", + "zone": "zone_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertStoragePoolRequest.meta.fields["storage_pool_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "storage_pool_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the 
runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["storage_pool_resource"][field])): + del request_init["storage_pool_resource"][field][i][subfield] + else: + del request_init["storage_pool_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.insert(request) + 
+ # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_insert_rest_required_fields(request_type=compute.InsertStoragePoolRequest): + transport_class = transports.StoragePoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.insert(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "storagePoolResource", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.StoragePoolsRestInterceptor(), + ) + client = StoragePoolsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.StoragePoolsRestInterceptor, "post_insert" + ) as post, mock.patch.object( + transports.StoragePoolsRestInterceptor, "pre_insert" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertStoragePoolRequest.pb( + compute.InsertStoragePoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertStoragePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertStoragePoolRequest +): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_flattened(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + storage_pool_resource=compute.StoragePool( + capacity_provisioning_type="capacity_provisioning_type_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/storagePools" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertStoragePoolRequest(), + project="project_value", + zone="zone_value", + storage_pool_resource=compute.StoragePool( + capacity_provisioning_type="capacity_provisioning_type_value" + ), + ) + + +def test_insert_rest_error(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.InsertStoragePoolRequest, + dict, + ], +) +def test_insert_unary_rest(request_type): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["storage_pool_resource"] = { + "capacity_provisioning_type": "capacity_provisioning_type_value", + "creation_timestamp": "creation_timestamp_value", + "description": "description_value", + "id": 205, + "kind": "kind_value", + "label_fingerprint": "label_fingerprint_value", + "labels": {}, + "name": "name_value", + "performance_provisioning_type": "performance_provisioning_type_value", + "pool_provisioned_capacity_gb": 2976, + "pool_provisioned_iops": 2277, + "pool_provisioned_throughput": 2948, + "resource_status": { + "disk_count": 1075, + "last_resize_timestamp": "last_resize_timestamp_value", + "max_total_provisioned_disk_capacity_gb": 4025, + "pool_used_capacity_bytes": 2557, + "pool_used_iops": 1508, + "pool_used_throughput": 2179, + "pool_user_written_bytes": 2506, + "total_provisioned_disk_capacity_gb": 3604, + "total_provisioned_disk_iops": 2905, + "total_provisioned_disk_throughput": 3576, + }, + "self_link": "self_link_value", + "self_link_with_id": "self_link_with_id_value", + "state": "state_value", + "status": {}, + "storage_pool_type": "storage_pool_type_value", + "zone": "zone_value", + } + # The version of a generated dependency at test runtime may differ from the version used during 
generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertStoragePoolRequest.meta.fields["storage_pool_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "storage_pool_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not 
present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["storage_pool_resource"][field])): + del request_init["storage_pool_resource"][field][i][subfield] + else: + del request_init["storage_pool_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = 
client.insert_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_insert_unary_rest_required_fields( + request_type=compute.InsertStoragePoolRequest, +): + transport_class = transports.StoragePoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.insert_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "storagePoolResource", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_unary_rest_interceptors(null_interceptor): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.StoragePoolsRestInterceptor(), + ) + client = StoragePoolsClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.StoragePoolsRestInterceptor, "post_insert" + ) as post, mock.patch.object( + transports.StoragePoolsRestInterceptor, "pre_insert" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.InsertStoragePoolRequest.pb( + compute.InsertStoragePoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.InsertStoragePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.insert_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_insert_unary_rest_bad_request( + transport: str = "rest", request_type=compute.InsertStoragePoolRequest +): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert_unary(request) + + +def test_insert_unary_rest_flattened(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + storage_pool_resource=compute.StoragePool( + capacity_provisioning_type="capacity_provisioning_type_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/storagePools" + % client.transport._host, + args[1], + ) + + +def test_insert_unary_rest_flattened_error(transport: str = "rest"): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.insert_unary( + compute.InsertStoragePoolRequest(), + project="project_value", + zone="zone_value", + storage_pool_resource=compute.StoragePool( + capacity_provisioning_type="capacity_provisioning_type_value" + ), + ) + + +def test_insert_unary_rest_error(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.ListStoragePoolsRequest, + dict, + ], +) +def test_list_rest(request_type): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.StoragePoolList( + etag="etag_value", + id="id_value", + kind="kind_value", + next_page_token="next_page_token_value", + self_link="self_link_value", + unreachables=["unreachables_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.StoragePoolList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.etag == "etag_value" + assert response.id == "id_value" + assert response.kind == "kind_value" + assert response.next_page_token == "next_page_token_value" + assert response.self_link == "self_link_value" + assert response.unreachables == ["unreachables_value"] + + +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_rest_required_fields(request_type=compute.ListStoragePoolsRequest): + transport_class = transports.StoragePoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "max_results", + "order_by", + "page_token", + "return_partial_success", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.StoragePoolList() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.StoragePoolList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "maxResults", + "orderBy", + "pageToken", + "returnPartialSuccess", + ) + ) + & set( + ( + "project", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.StoragePoolsRestInterceptor(), + ) + client = StoragePoolsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as 
transcode, mock.patch.object( + transports.StoragePoolsRestInterceptor, "post_list" + ) as post, mock.patch.object( + transports.StoragePoolsRestInterceptor, "pre_list" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListStoragePoolsRequest.pb( + compute.ListStoragePoolsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.StoragePoolList.to_json( + compute.StoragePoolList() + ) + + request = compute.ListStoragePoolsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.StoragePoolList() + + client.list( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListStoragePoolsRequest +): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) + + +def test_list_rest_flattened(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.StoragePoolList() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.StoragePoolList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/storagePools" + % client.transport._host, + args[1], + ) + + +def test_list_rest_flattened_error(transport: str = "rest"): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListStoragePoolsRequest(), + project="project_value", + zone="zone_value", + ) + + +def test_list_rest_pager(transport: str = "rest"): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.StoragePoolList( + items=[ + compute.StoragePool(), + compute.StoragePool(), + compute.StoragePool(), + ], + next_page_token="abc", + ), + compute.StoragePoolList( + items=[], + next_page_token="def", + ), + compute.StoragePoolList( + items=[ + compute.StoragePool(), + ], + next_page_token="ghi", + ), + compute.StoragePoolList( + items=[ + compute.StoragePool(), + compute.StoragePool(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.StoragePoolList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "sample1", "zone": "sample2"} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.StoragePool) for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + compute.ListDisksStoragePoolsRequest, + dict, + ], +) +def test_list_disks_rest(request_type): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "storage_pool": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.StoragePoolListDisks( + etag="etag_value", + id="id_value", + kind="kind_value", + next_page_token="next_page_token_value", + self_link="self_link_value", + unreachables=["unreachables_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.StoragePoolListDisks.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_disks(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDisksPager) + assert response.etag == "etag_value" + assert response.id == "id_value" + assert response.kind == "kind_value" + assert response.next_page_token == "next_page_token_value" + assert response.self_link == "self_link_value" + assert response.unreachables == ["unreachables_value"] + + +def test_list_disks_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_disks in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.list_disks] = mock_rpc + + request = {} + client.list_disks(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_disks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_disks_rest_required_fields( + request_type=compute.ListDisksStoragePoolsRequest, +): + transport_class = transports.StoragePoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["storage_pool"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_disks._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["storagePool"] = "storage_pool_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_disks._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "max_results", + "order_by", + "page_token", + "return_partial_success", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "storagePool" in jsonified_request + assert jsonified_request["storagePool"] == "storage_pool_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.StoragePoolListDisks() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.StoragePoolListDisks.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_disks(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_disks_rest_unset_required_fields(): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_disks._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "maxResults", + "orderBy", + "pageToken", + "returnPartialSuccess", + ) + ) + & set( + ( + "project", + "storagePool", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_disks_rest_interceptors(null_interceptor): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.StoragePoolsRestInterceptor(), + ) + client = StoragePoolsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.StoragePoolsRestInterceptor, "post_list_disks" + ) as post, mock.patch.object( + transports.StoragePoolsRestInterceptor, "pre_list_disks" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.ListDisksStoragePoolsRequest.pb( + compute.ListDisksStoragePoolsRequest() + ) + transcode.return_value = { 
+ "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.StoragePoolListDisks.to_json( + compute.StoragePoolListDisks() + ) + + request = compute.ListDisksStoragePoolsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.StoragePoolListDisks() + + client.list_disks( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_disks_rest_bad_request( + transport: str = "rest", request_type=compute.ListDisksStoragePoolsRequest +): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "storage_pool": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_disks(request) + + +def test_list_disks_rest_flattened(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.StoragePoolListDisks() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "storage_pool": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + storage_pool="storage_pool_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.StoragePoolListDisks.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_disks(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/storagePools/{storage_pool}/listDisks" + % client.transport._host, + args[1], + ) + + +def test_list_disks_rest_flattened_error(transport: str = "rest"): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_disks( + compute.ListDisksStoragePoolsRequest(), + project="project_value", + zone="zone_value", + storage_pool="storage_pool_value", + ) + + +def test_list_disks_rest_pager(transport: str = "rest"): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.StoragePoolListDisks( + items=[ + compute.StoragePoolDisk(), + compute.StoragePoolDisk(), + compute.StoragePoolDisk(), + ], + next_page_token="abc", + ), + compute.StoragePoolListDisks( + items=[], + next_page_token="def", + ), + compute.StoragePoolListDisks( + items=[ + compute.StoragePoolDisk(), + ], + next_page_token="ghi", + ), + compute.StoragePoolListDisks( + items=[ + compute.StoragePoolDisk(), + compute.StoragePoolDisk(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.StoragePoolListDisks.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "project": "sample1", + "zone": "sample2", + "storage_pool": "sample3", + } + + pager = client.list_disks(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.StoragePoolDisk) for i in results) + + pages = list(client.list_disks(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + compute.SetIamPolicyStoragePoolRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["zone_set_policy_request_resource"] = { + "bindings": [ + { + "binding_id": "binding_id_value", + "condition": { + 
"description": "description_value", + "expression": "expression_value", + "location": "location_value", + "title": "title_value", + }, + "members": ["members_value1", "members_value2"], + "role": "role_value", + } + ], + "etag": "etag_value", + "policy": { + "audit_configs": [ + { + "audit_log_configs": [ + { + "exempted_members": [ + "exempted_members_value1", + "exempted_members_value2", + ], + "ignore_child_exemptions": True, + "log_type": "log_type_value", + } + ], + "exempted_members": [ + "exempted_members_value1", + "exempted_members_value2", + ], + "service": "service_value", + } + ], + "bindings": {}, + "etag": "etag_value", + "iam_owned": True, + "rules": [ + { + "action": "action_value", + "conditions": [ + { + "iam": "iam_value", + "op": "op_value", + "svc": "svc_value", + "sys": "sys_value", + "values": ["values_value1", "values_value2"], + } + ], + "description": "description_value", + "ins": ["ins_value1", "ins_value2"], + "log_configs": [ + { + "cloud_audit": { + "authorization_logging_options": { + "permission_type": "permission_type_value" + }, + "log_name": "log_name_value", + }, + "counter": { + "custom_fields": [ + {"name": "name_value", "value": "value_value"} + ], + "field": "field_value", + "metric": "metric_value", + }, + "data_access": {"log_mode": "log_mode_value"}, + } + ], + "not_ins": ["not_ins_value1", "not_ins_value2"], + "permissions": ["permissions_value1", "permissions_value2"], + } + ], + "version": 774, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyStoragePoolRequest.meta.fields[ + "zone_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "zone_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample 
request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["zone_set_policy_request_resource"][field]) + ): + del request_init["zone_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["zone_set_policy_request_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag="etag_value", + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == "etag_value" + assert response.iam_owned is True + assert response.version == 774 + + +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_iam_policy_rest_required_fields( + request_type=compute.SetIamPolicyStoragePoolRequest, +): + transport_class = transports.StoragePoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["resource"] = "resource_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_iam_policy(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "project", + "resource", + "zone", + "zoneSetPolicyRequestResource", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.StoragePoolsRestInterceptor(), + ) + client = StoragePoolsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, 
"transcode" + ) as transcode, mock.patch.object( + transports.StoragePoolsRestInterceptor, "post_set_iam_policy" + ) as post, mock.patch.object( + transports.StoragePoolsRestInterceptor, "pre_set_iam_policy" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.SetIamPolicyStoragePoolRequest.pb( + compute.SetIamPolicyStoragePoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Policy.to_json(compute.Policy()) + + request = compute.SetIamPolicyStoragePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + + client.set_iam_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetIamPolicyStoragePoolRequest +): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +def test_set_iam_policy_rest_flattened(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + resource="resource_value", + zone_set_policy_request_resource=compute.ZoneSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/storagePools/{resource}/setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyStoragePoolRequest(), + project="project_value", + zone="zone_value", + resource="resource_value", + zone_set_policy_request_resource=compute.ZoneSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), + ) + + +def test_set_iam_policy_rest_error(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.TestIamPermissionsStoragePoolRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = { + "permissions": ["permissions_value1", "permissions_value2"] + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsStoragePoolRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the 
sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=["permissions_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_test_iam_permissions_rest_required_fields( + request_type=compute.TestIamPermissionsStoragePoolRequest, +): + transport_class = transports.StoragePoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["resource"] = "resource_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.test_iam_permissions(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "project", + "resource", + "testPermissionsRequestResource", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.StoragePoolsRestInterceptor(), + ) + 
client = StoragePoolsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.StoragePoolsRestInterceptor, "post_test_iam_permissions" + ) as post, mock.patch.object( + transports.StoragePoolsRestInterceptor, "pre_test_iam_permissions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.TestIamPermissionsStoragePoolRequest.pb( + compute.TestIamPermissionsStoragePoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.TestPermissionsResponse.to_json( + compute.TestPermissionsResponse() + ) + + request = compute.TestIamPermissionsStoragePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + + client.test_iam_permissions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=compute.TestIamPermissionsStoragePoolRequest +): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +def test_test_iam_permissions_rest_flattened(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + resource="resource_value", + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/storagePools/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsStoragePoolRequest(), + project="project_value", + zone="zone_value", + resource="resource_value", + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), + ) + + +def test_test_iam_permissions_rest_error(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.UpdateStoragePoolRequest, + dict, + ], +) +def test_update_rest(request_type): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "storage_pool": "sample3"} + request_init["storage_pool_resource"] = { + "capacity_provisioning_type": "capacity_provisioning_type_value", + "creation_timestamp": "creation_timestamp_value", + "description": "description_value", + "id": 205, + "kind": "kind_value", + "label_fingerprint": "label_fingerprint_value", + "labels": {}, + "name": "name_value", + "performance_provisioning_type": "performance_provisioning_type_value", + "pool_provisioned_capacity_gb": 2976, + "pool_provisioned_iops": 2277, + "pool_provisioned_throughput": 2948, + "resource_status": { + "disk_count": 1075, + "last_resize_timestamp": "last_resize_timestamp_value", + 
"max_total_provisioned_disk_capacity_gb": 4025, + "pool_used_capacity_bytes": 2557, + "pool_used_iops": 1508, + "pool_used_throughput": 2179, + "pool_user_written_bytes": 2506, + "total_provisioned_disk_capacity_gb": 3604, + "total_provisioned_disk_iops": 2905, + "total_provisioned_disk_throughput": 3576, + }, + "self_link": "self_link_value", + "self_link_with_id": "self_link_with_id_value", + "state": "state_value", + "status": {}, + "storage_pool_type": "storage_pool_type_value", + "zone": "zone_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateStoragePoolRequest.meta.fields["storage_pool_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "storage_pool_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["storage_pool_resource"][field])): + del 
request_init["storage_pool_resource"][field][i][subfield] + else: + del request_init["storage_pool_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +def test_update_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_rest_required_fields(request_type=compute.UpdateStoragePoolRequest): + transport_class = transports.StoragePoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["storage_pool"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["storagePool"] = "storage_pool_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "storagePool" in jsonified_request + assert jsonified_request["storagePool"] == "storage_pool_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_rest_unset_required_fields(): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "project", + "storagePool", + "storagePoolResource", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_rest_interceptors(null_interceptor): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.StoragePoolsRestInterceptor(), + ) + client = StoragePoolsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.StoragePoolsRestInterceptor, "post_update" + ) as post, mock.patch.object( + transports.StoragePoolsRestInterceptor, "pre_update" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateStoragePoolRequest.pb( + compute.UpdateStoragePoolRequest() + ) + transcode.return_value = { + "method": "post", + 
"uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateStoragePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_rest_bad_request( + transport: str = "rest", request_type=compute.UpdateStoragePoolRequest +): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "storage_pool": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + +def test_update_rest_flattened(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "storage_pool": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + storage_pool="storage_pool_value", + storage_pool_resource=compute.StoragePool( + capacity_provisioning_type="capacity_provisioning_type_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/storagePools/{storage_pool}" + % client.transport._host, + args[1], + ) + + +def test_update_rest_flattened_error(transport: str = "rest"): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update( + compute.UpdateStoragePoolRequest(), + project="project_value", + zone="zone_value", + storage_pool="storage_pool_value", + storage_pool_resource=compute.StoragePool( + capacity_provisioning_type="capacity_provisioning_type_value" + ), + ) + + +def test_update_rest_error(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.UpdateStoragePoolRequest, + dict, + ], +) +def test_update_unary_rest(request_type): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "storage_pool": "sample3"} + request_init["storage_pool_resource"] = { + "capacity_provisioning_type": "capacity_provisioning_type_value", + "creation_timestamp": "creation_timestamp_value", + "description": "description_value", + "id": 205, + "kind": "kind_value", + "label_fingerprint": "label_fingerprint_value", + "labels": {}, + "name": "name_value", + "performance_provisioning_type": "performance_provisioning_type_value", + "pool_provisioned_capacity_gb": 2976, + "pool_provisioned_iops": 2277, + "pool_provisioned_throughput": 2948, + "resource_status": { + "disk_count": 1075, + "last_resize_timestamp": "last_resize_timestamp_value", + "max_total_provisioned_disk_capacity_gb": 4025, + "pool_used_capacity_bytes": 2557, + "pool_used_iops": 1508, + "pool_used_throughput": 2179, + "pool_user_written_bytes": 2506, + "total_provisioned_disk_capacity_gb": 3604, + "total_provisioned_disk_iops": 2905, + "total_provisioned_disk_throughput": 3576, + }, + "self_link": "self_link_value", + "self_link_with_id": "self_link_with_id_value", + "state": "state_value", + "status": {}, + "storage_pool_type": "storage_pool_type_value", + "zone": "zone_value", + } + # The version of a generated 
dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateStoragePoolRequest.meta.fields["storage_pool_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "storage_pool_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } 
+ ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["storage_pool_resource"][field])): + del request_init["storage_pool_resource"][field][i][subfield] + else: + del request_init["storage_pool_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = 
json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_unary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Operation) + + +def test_update_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_unary_rest_required_fields( + request_type=compute.UpdateStoragePoolRequest, +): + transport_class = transports.StoragePoolsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["storage_pool"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["storagePool"] = "storage_pool_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "storagePool" in jsonified_request + assert jsonified_request["storagePool"] == "storage_pool_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_unary_rest_unset_required_fields(): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "project", + "storagePool", + "storagePoolResource", + "zone", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_unary_rest_interceptors(null_interceptor): + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.StoragePoolsRestInterceptor(), + ) + client = StoragePoolsClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.StoragePoolsRestInterceptor, "post_update" + ) as post, mock.patch.object( + transports.StoragePoolsRestInterceptor, "pre_update" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = compute.UpdateStoragePoolRequest.pb( + compute.UpdateStoragePoolRequest() + ) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = compute.Operation.to_json(compute.Operation()) + + request = compute.UpdateStoragePoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + + client.update_unary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_unary_rest_bad_request( + transport: str = "rest", request_type=compute.UpdateStoragePoolRequest +): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "storage_pool": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_unary(request) + + +def test_update_unary_rest_flattened(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "storage_pool": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + storage_pool="storage_pool_value", + storage_pool_resource=compute.StoragePool( + capacity_provisioning_type="capacity_provisioning_type_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/storagePools/{storage_pool}" + % client.transport._host, + args[1], + ) + + +def test_update_unary_rest_flattened_error(transport: str = "rest"): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_unary( + compute.UpdateStoragePoolRequest(), + project="project_value", + zone="zone_value", + storage_pool="storage_pool_value", + storage_pool_resource=compute.StoragePool( + capacity_provisioning_type="capacity_provisioning_type_value" + ), + ) + + +def test_update_unary_rest_error(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = StoragePoolsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = StoragePoolsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = StoragePoolsClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = StoragePoolsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.StoragePoolsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = StoragePoolsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.StoragePoolsRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_transport_kind(transport_name): + transport = StoragePoolsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_storage_pools_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.StoragePoolsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_storage_pools_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.compute_v1.services.storage_pools.transports.StoragePoolsTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.StoragePoolsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "aggregated_list", + "delete", + "get", + "get_iam_policy", + "insert", + "list", + "list_disks", + "set_iam_policy", + "test_iam_permissions", + "update", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_storage_pools_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.compute_v1.services.storage_pools.transports.StoragePoolsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.StoragePoolsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +def test_storage_pools_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.compute_v1.services.storage_pools.transports.StoragePoolsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.StoragePoolsTransport() + adc.assert_called_once() + + +def test_storage_pools_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + StoragePoolsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id=None, + ) + + +def test_storage_pools_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.StoragePoolsRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_storage_pools_host_no_port(transport_name): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_storage_pools_host_with_port(transport_name): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_storage_pools_client_transport_session_collision(transport_name): + creds1 = 
ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = StoragePoolsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = StoragePoolsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.list_disks._session + session2 = client2.transport.list_disks._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + session1 = client1.transport.update._session + session2 = client2.transport.update._session + assert session1 != session2 + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = StoragePoolsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = 
StoragePoolsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = StoragePoolsClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = StoragePoolsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = StoragePoolsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = StoragePoolsClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = StoragePoolsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = StoragePoolsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = StoragePoolsClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = StoragePoolsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = StoragePoolsClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = StoragePoolsClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = StoragePoolsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = StoragePoolsClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = StoragePoolsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.StoragePoolsTransport, "_prep_wrapped_messages" + ) as prep: + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.StoragePoolsTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = StoragePoolsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = StoragePoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client 
calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (StoragePoolsClient, transports.StoragePoolsRestTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_subnetworks.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_subnetworks.py index 1366f1d6f420..c04ea0e7bb23 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_subnetworks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_subnetworks.py @@ -984,6 +984,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListSubnetworksRequest, ): @@ -1385,6 +1421,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteSubnetworkRequest): transport_class = transports.SubnetworksRestTransport @@ -1695,6 +1771,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteSubnetworkRequest, ): @@ -2114,6 +2230,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_expand_ip_cidr_range_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.expand_ip_cidr_range in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.expand_ip_cidr_range + ] = mock_rpc + + request = {} + client.expand_ip_cidr_range(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.expand_ip_cidr_range(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_expand_ip_cidr_range_rest_required_fields( request_type=compute.ExpandIpCidrRangeSubnetworkRequest, ): @@ -2519,6 +2679,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_expand_ip_cidr_range_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.expand_ip_cidr_range in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.expand_ip_cidr_range + ] = mock_rpc + + request = {} + client.expand_ip_cidr_range_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.expand_ip_cidr_range_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_expand_ip_cidr_range_unary_rest_required_fields( request_type=compute.ExpandIpCidrRangeSubnetworkRequest, ): @@ -2863,6 +3067,42 @@ def test_get_rest(request_type): assert response.state == "state_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetSubnetworkRequest): transport_class = transports.SubnetworksRestTransport @@ -3153,6 +3393,42 @@ def test_get_iam_policy_rest(request_type): assert response.version == 774 +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_iam_policy_rest_required_fields( request_type=compute.GetIamPolicySubnetworkRequest, ): @@ -3594,6 +3870,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertSubnetworkRequest): transport_class = transports.SubnetworksRestTransport @@ -4008,6 +4324,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertSubnetworkRequest, ): @@ -4303,6 +4659,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListSubnetworksRequest): transport_class = transports.SubnetworksRestTransport @@ -4659,6 +5051,42 @@ def test_list_usable_rest(request_type): assert response.self_link == "self_link_value" +def test_list_usable_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_usable in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_usable] = mock_rpc + + request = {} + client.list_usable(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_usable(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_usable_rest_required_fields( request_type=compute.ListUsableSubnetworksRequest, ): @@ -5153,6 +5581,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchSubnetworkRequest): transport_class = transports.SubnetworksRestTransport @@ -5586,6 +6054,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields(request_type=compute.PatchSubnetworkRequest): transport_class = transports.SubnetworksRestTransport @@ -6048,6 +6556,42 @@ def get_message_fields(field): assert response.version == 774 +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_iam_policy_rest_required_fields( request_type=compute.SetIamPolicySubnetworkRequest, ): @@ -6473,6 +7017,51 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_private_ip_google_access_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_private_ip_google_access + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_private_ip_google_access + ] = mock_rpc + + request = {} + client.set_private_ip_google_access(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_private_ip_google_access(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_private_ip_google_access_rest_required_fields( request_type=compute.SetPrivateIpGoogleAccessSubnetworkRequest, ): @@ -6879,6 +7468,51 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_private_ip_google_access_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_private_ip_google_access + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_private_ip_google_access + ] = mock_rpc + + request = {} + client.set_private_ip_google_access_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_private_ip_google_access_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_private_ip_google_access_unary_rest_required_fields( request_type=compute.SetPrivateIpGoogleAccessSubnetworkRequest, ): @@ -7260,6 +7894,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsSubnetworkRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_grpc_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_grpc_proxies.py index 7f26b736a78a..7f40c31f333f 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_grpc_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_grpc_proxies.py @@ -1067,6 +1067,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteTargetGrpcProxyRequest): transport_class = transports.TargetGrpcProxiesRestTransport @@ -1366,6 +1406,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteTargetGrpcProxyRequest, ): @@ -1665,6 +1745,42 @@ def test_get_rest(request_type): assert response.validate_for_proxyless is True +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetTargetGrpcProxyRequest): transport_class = transports.TargetGrpcProxiesRestTransport @@ -2071,6 +2187,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertTargetGrpcProxyRequest): transport_class = transports.TargetGrpcProxiesRestTransport @@ -2456,6 +2612,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertTargetGrpcProxyRequest, ): @@ -2744,6 +2940,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListTargetGrpcProxiesRequest): transport_class = transports.TargetGrpcProxiesRestTransport @@ -3212,6 +3444,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchTargetGrpcProxyRequest): transport_class = transports.TargetGrpcProxiesRestTransport @@ -3604,6 +3876,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchTargetGrpcProxyRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_http_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_http_proxies.py index 4fccb2804898..1652960ab925 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_http_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_http_proxies.py @@ -1033,6 +1033,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListTargetHttpProxiesRequest, ): @@ -1436,6 +1472,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteTargetHttpProxyRequest): transport_class = transports.TargetHttpProxiesRestTransport @@ -1735,6 +1811,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteTargetHttpProxyRequest, ): @@ -2036,6 +2152,42 @@ def test_get_rest(request_type): assert response.url_map == "url_map_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetTargetHttpProxyRequest): transport_class = transports.TargetHttpProxiesRestTransport @@ -2443,6 +2595,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertTargetHttpProxyRequest): transport_class = transports.TargetHttpProxiesRestTransport @@ -2829,6 +3021,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertTargetHttpProxyRequest, ): @@ -3117,6 +3349,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListTargetHttpProxiesRequest): transport_class = transports.TargetHttpProxiesRestTransport @@ -3586,6 +3854,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchTargetHttpProxyRequest): transport_class = transports.TargetHttpProxiesRestTransport @@ -3979,6 +4287,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchTargetHttpProxyRequest, ): @@ -4384,6 +4732,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_url_map_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_url_map in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_url_map] = mock_rpc + + request = {} + client.set_url_map(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_url_map(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_url_map_rest_required_fields( request_type=compute.SetUrlMapTargetHttpProxyRequest, ): @@ -4763,6 +5151,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_url_map_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_url_map in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_url_map] = mock_rpc + + request = {} + client.set_url_map_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_url_map_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_url_map_unary_rest_required_fields( request_type=compute.SetUrlMapTargetHttpProxyRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_https_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_https_proxies.py index b66b0ba32ec6..903b70ea85db 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_https_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_https_proxies.py @@ -1037,6 +1037,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListTargetHttpsProxiesRequest, ): @@ -1441,6 +1477,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteTargetHttpsProxyRequest, ): @@ -1742,6 +1818,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteTargetHttpsProxyRequest, ): @@ -2055,6 +2171,42 @@ def test_get_rest(request_type): assert response.url_map == "url_map_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetTargetHttpsProxyRequest): transport_class = transports.TargetHttpsProxiesRestTransport @@ -2468,6 +2620,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertTargetHttpsProxyRequest, ): @@ -2862,6 +3054,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertTargetHttpsProxyRequest, ): @@ -3150,6 +3382,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListTargetHttpsProxiesRequest): transport_class = transports.TargetHttpsProxiesRestTransport @@ -3625,6 +3893,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchTargetHttpsProxyRequest): transport_class = transports.TargetHttpsProxiesRestTransport @@ -4024,6 +4332,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields( request_type=compute.PatchTargetHttpsProxyRequest, ): @@ -4440,6 +4788,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_certificate_map_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_certificate_map in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_certificate_map + ] = mock_rpc + + request = {} + client.set_certificate_map(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_certificate_map(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_certificate_map_rest_required_fields( request_type=compute.SetCertificateMapTargetHttpsProxyRequest, ): @@ -4835,6 +5227,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_certificate_map_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_certificate_map in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_certificate_map + ] = mock_rpc + + request = {} + client.set_certificate_map_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_certificate_map_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_certificate_map_unary_rest_required_fields( request_type=compute.SetCertificateMapTargetHttpsProxyRequest, ): @@ -5252,6 +5688,48 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_quic_override_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_quic_override in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_quic_override + ] = mock_rpc + + request = {} + client.set_quic_override(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_quic_override(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_quic_override_rest_required_fields( request_type=compute.SetQuicOverrideTargetHttpsProxyRequest, ): @@ -5646,6 +6124,48 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_quic_override_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_quic_override in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_quic_override + ] = mock_rpc + + request = {} + client.set_quic_override_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_quic_override_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_quic_override_unary_rest_required_fields( request_type=compute.SetQuicOverrideTargetHttpsProxyRequest, ): @@ -6062,6 +6582,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_ssl_certificates_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_ssl_certificates in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_ssl_certificates + ] = mock_rpc + + request = {} + client.set_ssl_certificates(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_ssl_certificates(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_ssl_certificates_rest_required_fields( request_type=compute.SetSslCertificatesTargetHttpsProxyRequest, ): @@ -6457,6 +7021,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_ssl_certificates_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_ssl_certificates in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_ssl_certificates + ] = mock_rpc + + request = {} + client.set_ssl_certificates_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_ssl_certificates_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_ssl_certificates_unary_rest_required_fields( request_type=compute.SetSslCertificatesTargetHttpsProxyRequest, ): @@ -6865,6 +7473,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_ssl_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_ssl_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_ssl_policy] = mock_rpc + + request = {} + client.set_ssl_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_ssl_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_ssl_policy_rest_required_fields( request_type=compute.SetSslPolicyTargetHttpsProxyRequest, ): @@ -7250,6 +7898,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_ssl_policy_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_ssl_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_ssl_policy] = mock_rpc + + request = {} + client.set_ssl_policy_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_ssl_policy_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_ssl_policy_unary_rest_required_fields( request_type=compute.SetSslPolicyTargetHttpsProxyRequest, ): @@ -7655,6 +8343,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_url_map_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_url_map in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_url_map] = mock_rpc + + request = {} + client.set_url_map(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_url_map(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_url_map_rest_required_fields( request_type=compute.SetUrlMapTargetHttpsProxyRequest, ): @@ -8034,6 +8762,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_url_map_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_url_map in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_url_map] = mock_rpc + + request = {} + client.set_url_map_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_url_map_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_url_map_unary_rest_required_fields( request_type=compute.SetUrlMapTargetHttpsProxyRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_instances.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_instances.py index 1b12ea6575bb..a5f7ada2d421 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_instances.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_instances.py @@ -1019,6 +1019,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListTargetInstancesRequest, ): @@ -1426,6 +1462,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteTargetInstanceRequest): transport_class = transports.TargetInstancesRestTransport @@ -1744,6 +1820,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteTargetInstanceRequest, ): @@ -2064,6 +2180,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetTargetInstanceRequest): transport_class = transports.TargetInstancesRestTransport @@ -2484,6 +2636,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertTargetInstanceRequest): transport_class = transports.TargetInstancesRestTransport @@ -2875,6 +3067,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertTargetInstanceRequest, ): @@ -3170,6 +3402,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListTargetInstancesRequest): transport_class = transports.TargetInstancesRestTransport @@ -3646,6 +3914,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_security_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_security_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_security_policy + ] = mock_rpc + + request = {} + client.set_security_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_security_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_security_policy_rest_required_fields( request_type=compute.SetSecurityPolicyTargetInstanceRequest, ): @@ -4052,6 +4364,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_security_policy_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_security_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_security_policy + ] = mock_rpc + + request = {} + client.set_security_policy_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_security_policy_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_security_policy_unary_rest_required_fields( request_type=compute.SetSecurityPolicyTargetInstanceRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_pools.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_pools.py index 5cceb7c578e4..3afa306ab6b1 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_pools.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_pools.py @@ -1103,6 +1103,48 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_add_health_check_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_health_check in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.add_health_check + ] = mock_rpc + + request = {} + client.add_health_check(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_health_check(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_health_check_rest_required_fields( request_type=compute.AddHealthCheckTargetPoolRequest, ): @@ -1512,6 +1554,48 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_add_health_check_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_health_check in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.add_health_check + ] = mock_rpc + + request = {} + client.add_health_check_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_health_check_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_health_check_unary_rest_required_fields( request_type=compute.AddHealthCheckTargetPoolRequest, ): @@ -1943,6 +2027,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_add_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_instance] = mock_rpc + + request = {} + client.add_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_instance_rest_required_fields( request_type=compute.AddInstanceTargetPoolRequest, ): @@ -2348,6 +2472,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_add_instance_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.add_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.add_instance] = mock_rpc + + request = {} + client.add_instance_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.add_instance_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_add_instance_unary_rest_required_fields( request_type=compute.AddInstanceTargetPoolRequest, ): @@ -2656,6 +2820,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListTargetPoolsRequest, ): @@ -3057,6 +3257,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteTargetPoolRequest): transport_class = transports.TargetPoolsRestTransport @@ -3367,6 +3607,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteTargetPoolRequest, ): @@ -3683,6 +3963,42 @@ def test_get_rest(request_type): assert response.session_affinity == "session_affinity_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetTargetPoolRequest): transport_class = transports.TargetPoolsRestTransport @@ -4043,6 +4359,42 @@ def get_message_fields(field): assert response.kind == "kind_value" +def test_get_health_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_health in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_health] = mock_rpc + + request = {} + client.get_health(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_health(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_health_rest_required_fields( request_type=compute.GetHealthTargetPoolRequest, ): @@ -4469,6 +4821,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertTargetPoolRequest): transport_class = transports.TargetPoolsRestTransport @@ -4856,6 +5248,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertTargetPoolRequest, ): @@ -5147,6 +5579,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListTargetPoolsRequest): transport_class = transports.TargetPoolsRestTransport @@ -5624,6 +6092,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_remove_health_check_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.remove_health_check in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.remove_health_check + ] = mock_rpc + + request = {} + client.remove_health_check(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_health_check(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_health_check_rest_required_fields( request_type=compute.RemoveHealthCheckTargetPoolRequest, ): @@ -6033,6 +6545,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_remove_health_check_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.remove_health_check in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.remove_health_check + ] = mock_rpc + + request = {} + client.remove_health_check_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_health_check_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_health_check_unary_rest_required_fields( request_type=compute.RemoveHealthCheckTargetPoolRequest, ): @@ -6464,6 +7020,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_remove_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.remove_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.remove_instance] = mock_rpc + + request = {} + client.remove_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_instance_rest_required_fields( request_type=compute.RemoveInstanceTargetPoolRequest, ): @@ -6869,6 +7465,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_remove_instance_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.remove_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.remove_instance] = mock_rpc + + request = {} + client.remove_instance_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.remove_instance_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_remove_instance_unary_rest_required_fields( request_type=compute.RemoveInstanceTargetPoolRequest, ): @@ -7285,6 +7921,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_backup] = mock_rpc + + request = {} + client.set_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_backup_rest_required_fields( request_type=compute.SetBackupTargetPoolRequest, ): @@ -7685,6 +8361,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_backup_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_backup] = mock_rpc + + request = {} + client.set_backup_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_backup_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_backup_unary_rest_required_fields( request_type=compute.SetBackupTargetPoolRequest, ): @@ -8111,6 +8827,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_security_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_security_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_security_policy + ] = mock_rpc + + request = {} + client.set_security_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_security_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_security_policy_rest_required_fields( request_type=compute.SetSecurityPolicyTargetPoolRequest, ): @@ -8509,6 +9269,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_security_policy_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_security_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_security_policy + ] = mock_rpc + + request = {} + client.set_security_policy_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_security_policy_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_security_policy_unary_rest_required_fields( request_type=compute.SetSecurityPolicyTargetPoolRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_ssl_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_ssl_proxies.py index 2503a195c82d..da544efc9107 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_ssl_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_ssl_proxies.py @@ -1059,6 +1059,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteTargetSslProxyRequest): transport_class = transports.TargetSslProxiesRestTransport @@ -1358,6 +1398,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteTargetSslProxyRequest, ): @@ -1659,6 +1739,42 @@ def test_get_rest(request_type): assert response.ssl_policy == "ssl_policy_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetTargetSslProxyRequest): transport_class = transports.TargetSslProxiesRestTransport @@ -2066,6 +2182,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertTargetSslProxyRequest): transport_class = transports.TargetSslProxiesRestTransport @@ -2452,6 +2608,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertTargetSslProxyRequest, ): @@ -2740,6 +2936,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListTargetSslProxiesRequest): transport_class = transports.TargetSslProxiesRestTransport @@ -3208,6 +3440,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_backend_service_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_backend_service in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_backend_service + ] = mock_rpc + + request = {} + client.set_backend_service(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_backend_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_backend_service_rest_required_fields( request_type=compute.SetBackendServiceTargetSslProxyRequest, ): @@ -3602,6 +3878,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_backend_service_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_backend_service in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_backend_service + ] = mock_rpc + + request = {} + client.set_backend_service_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_backend_service_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_backend_service_unary_rest_required_fields( request_type=compute.SetBackendServiceTargetSslProxyRequest, ): @@ -4018,6 +4338,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_certificate_map_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_certificate_map in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_certificate_map + ] = mock_rpc + + request = {} + client.set_certificate_map(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_certificate_map(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_certificate_map_rest_required_fields( request_type=compute.SetCertificateMapTargetSslProxyRequest, ): @@ -4412,6 +4776,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_certificate_map_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_certificate_map in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_certificate_map + ] = mock_rpc + + request = {} + client.set_certificate_map_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_certificate_map_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_certificate_map_unary_rest_required_fields( request_type=compute.SetCertificateMapTargetSslProxyRequest, ): @@ -4828,6 +5236,48 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_proxy_header_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_proxy_header in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_proxy_header + ] = mock_rpc + + request = {} + client.set_proxy_header(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_proxy_header(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_proxy_header_rest_required_fields( request_type=compute.SetProxyHeaderTargetSslProxyRequest, ): @@ -5222,6 +5672,48 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_proxy_header_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_proxy_header in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_proxy_header + ] = mock_rpc + + request = {} + client.set_proxy_header_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_proxy_header_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_proxy_header_unary_rest_required_fields( request_type=compute.SetProxyHeaderTargetSslProxyRequest, ): @@ -5638,6 +6130,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_ssl_certificates_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_ssl_certificates in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_ssl_certificates + ] = mock_rpc + + request = {} + client.set_ssl_certificates(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_ssl_certificates(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_ssl_certificates_rest_required_fields( request_type=compute.SetSslCertificatesTargetSslProxyRequest, ): @@ -6033,6 +6569,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_ssl_certificates_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_ssl_certificates in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_ssl_certificates + ] = mock_rpc + + request = {} + client.set_ssl_certificates_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_ssl_certificates_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_ssl_certificates_unary_rest_required_fields( request_type=compute.SetSslCertificatesTargetSslProxyRequest, ): @@ -6441,6 +7021,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_ssl_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_ssl_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_ssl_policy] = mock_rpc + + request = {} + client.set_ssl_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_ssl_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_ssl_policy_rest_required_fields( request_type=compute.SetSslPolicyTargetSslProxyRequest, ): @@ -6826,6 +7446,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_ssl_policy_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_ssl_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_ssl_policy] = mock_rpc + + request = {} + client.set_ssl_policy_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_ssl_policy_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_ssl_policy_unary_rest_required_fields( request_type=compute.SetSslPolicyTargetSslProxyRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py index 86810fa053d1..a1e16261f07a 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py @@ -1025,6 +1025,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListTargetTcpProxiesRequest, ): @@ -1428,6 +1464,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteTargetTcpProxyRequest): transport_class = transports.TargetTcpProxiesRestTransport @@ -1727,6 +1803,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteTargetTcpProxyRequest, ): @@ -2026,6 +2142,42 @@ def test_get_rest(request_type): assert response.service == "service_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetTargetTcpProxyRequest): transport_class = transports.TargetTcpProxiesRestTransport @@ -2432,6 +2584,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertTargetTcpProxyRequest): transport_class = transports.TargetTcpProxiesRestTransport @@ -2817,6 +3009,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertTargetTcpProxyRequest, ): @@ -3105,6 +3337,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListTargetTcpProxiesRequest): transport_class = transports.TargetTcpProxiesRestTransport @@ -3573,6 +3841,50 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_backend_service_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_backend_service in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_backend_service + ] = mock_rpc + + request = {} + client.set_backend_service(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_backend_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_backend_service_rest_required_fields( request_type=compute.SetBackendServiceTargetTcpProxyRequest, ): @@ -3967,6 +4279,50 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_backend_service_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_backend_service in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_backend_service + ] = mock_rpc + + request = {} + client.set_backend_service_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_backend_service_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_backend_service_unary_rest_required_fields( request_type=compute.SetBackendServiceTargetTcpProxyRequest, ): @@ -4383,6 +4739,48 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_proxy_header_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_proxy_header in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_proxy_header + ] = mock_rpc + + request = {} + client.set_proxy_header(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_proxy_header(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_proxy_header_rest_required_fields( request_type=compute.SetProxyHeaderTargetTcpProxyRequest, ): @@ -4777,6 +5175,48 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_proxy_header_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_proxy_header in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_proxy_header + ] = mock_rpc + + request = {} + client.set_proxy_header_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_proxy_header_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_proxy_header_unary_rest_required_fields( request_type=compute.SetProxyHeaderTargetTcpProxyRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py index f948751aea73..c7c8b8c4061b 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py @@ -1033,6 +1033,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListTargetVpnGatewaysRequest, ): @@ -1440,6 +1476,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields( request_type=compute.DeleteTargetVpnGatewayRequest, ): @@ -1760,6 +1836,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteTargetVpnGatewayRequest, ): @@ -2082,6 +2198,42 @@ def test_get_rest(request_type): assert response.tunnels == ["tunnels_value"] +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetTargetVpnGatewayRequest): transport_class = transports.TargetVpnGatewaysRestTransport @@ -2506,6 +2658,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields( request_type=compute.InsertTargetVpnGatewayRequest, ): @@ -2903,6 +3095,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertTargetVpnGatewayRequest, ): @@ -3198,6 +3430,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListTargetVpnGatewaysRequest): transport_class = transports.TargetVpnGatewaysRestTransport @@ -3671,6 +3939,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_labels_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_rest_required_fields( request_type=compute.SetLabelsTargetVpnGatewayRequest, ): @@ -4070,6 +4378,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_labels_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_unary_rest_required_fields( request_type=compute.SetLabelsTargetVpnGatewayRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py index 9e51fd41fb6c..4d98aa05c46b 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py @@ -962,6 +962,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListUrlMapsRequest, ): @@ -1358,6 +1394,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteUrlMapRequest): transport_class = transports.UrlMapsRestTransport @@ -1653,6 +1729,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields(request_type=compute.DeleteUrlMapRequest): transport_class = transports.UrlMapsRestTransport @@ -1944,6 +2060,42 @@ def test_get_rest(request_type): assert response.self_link == "self_link_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetUrlMapRequest): transport_class = transports.UrlMapsRestTransport @@ -2497,6 +2649,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertUrlMapRequest): transport_class = transports.UrlMapsRestTransport @@ -3030,6 +3222,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields(request_type=compute.InsertUrlMapRequest): transport_class = transports.UrlMapsRestTransport @@ -3426,6 +3658,48 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_invalidate_cache_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.invalidate_cache in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.invalidate_cache + ] = mock_rpc + + request = {} + client.invalidate_cache(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.invalidate_cache(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_invalidate_cache_rest_required_fields( request_type=compute.InvalidateCacheUrlMapRequest, ): @@ -3812,6 +4086,48 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_invalidate_cache_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.invalidate_cache in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.invalidate_cache + ] = mock_rpc + + request = {} + client.invalidate_cache_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.invalidate_cache_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_invalidate_cache_unary_rest_required_fields( request_type=compute.InvalidateCacheUrlMapRequest, ): @@ -4105,6 +4421,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListUrlMapsRequest): transport_class = transports.UrlMapsRestTransport @@ -4719,6 +5071,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_rest_required_fields(request_type=compute.PatchUrlMapRequest): transport_class = transports.UrlMapsRestTransport @@ -5260,6 +5652,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_patch_unary_rest_required_fields(request_type=compute.PatchUrlMapRequest): transport_class = transports.UrlMapsRestTransport @@ -5823,6 +6255,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_update_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_rest_required_fields(request_type=compute.UpdateUrlMapRequest): transport_class = transports.UrlMapsRestTransport @@ -6364,6 +6836,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_update_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_unary_rest_required_fields(request_type=compute.UpdateUrlMapRequest): transport_class = transports.UrlMapsRestTransport @@ -6899,6 +7411,42 @@ def get_message_fields(field): assert isinstance(response, compute.UrlMapsValidateResponse) +def test_validate_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.validate in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.validate] = mock_rpc + + request = {} + client.validate(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.validate(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_validate_rest_required_fields(request_type=compute.ValidateUrlMapRequest): transport_class = transports.UrlMapsRestTransport diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_gateways.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_gateways.py index 3d53ffee0979..d8f73eae4d27 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_gateways.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_gateways.py @@ -984,6 +984,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListVpnGatewaysRequest, ): @@ -1385,6 +1421,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteVpnGatewayRequest): transport_class = transports.VpnGatewaysRestTransport @@ -1695,6 +1771,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields( request_type=compute.DeleteVpnGatewayRequest, ): @@ -2007,6 +2123,42 @@ def test_get_rest(request_type): assert response.stack_type == "stack_type_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetVpnGatewayRequest): transport_class = transports.VpnGatewaysRestTransport @@ -2290,6 +2442,42 @@ def test_get_status_rest(request_type): assert isinstance(response, compute.VpnGatewaysGetStatusResponse) +def test_get_status_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_status in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_status] = mock_rpc + + request = {} + client.get_status(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_status(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_status_rest_required_fields( request_type=compute.GetStatusVpnGatewayRequest, ): @@ -2715,6 +2903,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertVpnGatewayRequest): transport_class = transports.VpnGatewaysRestTransport @@ -3113,6 +3341,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields( request_type=compute.InsertVpnGatewayRequest, ): @@ -3408,6 +3676,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListVpnGatewaysRequest): transport_class = transports.VpnGatewaysRestTransport @@ -3879,6 +4183,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_labels_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_rest_required_fields( request_type=compute.SetLabelsVpnGatewayRequest, ): @@ -4278,6 +4622,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_labels_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_unary_rest_required_fields( request_type=compute.SetLabelsVpnGatewayRequest, ): @@ -4656,6 +5040,46 @@ def get_message_fields(field): assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_test_iam_permissions_rest_required_fields( request_type=compute.TestIamPermissionsVpnGatewayRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_tunnels.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_tunnels.py index fb9b92f1302f..5b895ed14fc1 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_tunnels.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_vpn_tunnels.py @@ -980,6 +980,42 @@ def test_aggregated_list_rest(request_type): assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_aggregated_list_rest_required_fields( request_type=compute.AggregatedListVpnTunnelsRequest, ): @@ -1378,6 +1414,46 @@ def test_delete_rest(request_type): assert response.zone == "zone_value" +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteVpnTunnelRequest): transport_class = transports.VpnTunnelsRestTransport @@ -1686,6 +1762,46 @@ def test_delete_unary_rest(request_type): assert isinstance(response, compute.Operation) +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_unary_rest_required_fields(request_type=compute.DeleteVpnTunnelRequest): transport_class = transports.VpnTunnelsRestTransport @@ -2018,6 +2134,42 @@ def test_get_rest(request_type): assert response.vpn_gateway_interface == 2229 +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetVpnTunnelRequest): transport_class = transports.VpnTunnelsRestTransport @@ -2445,6 +2597,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_rest_required_fields(request_type=compute.InsertVpnTunnelRequest): transport_class = transports.VpnTunnelsRestTransport @@ -2849,6 +3041,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_insert_unary_rest_required_fields(request_type=compute.InsertVpnTunnelRequest): transport_class = transports.VpnTunnelsRestTransport @@ -3140,6 +3372,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListVpnTunnelsRequest): transport_class = transports.VpnTunnelsRestTransport @@ -3611,6 +3879,46 @@ def get_message_fields(field): assert response.zone == "zone_value" +def test_set_labels_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_rest_required_fields( request_type=compute.SetLabelsVpnTunnelRequest, ): @@ -4010,6 +4318,46 @@ def get_message_fields(field): assert isinstance(response, compute.Operation) +def test_set_labels_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_set_labels_unary_rest_required_fields( request_type=compute.SetLabelsVpnTunnelRequest, ): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zone_operations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zone_operations.py index b3d6cffc555b..69a07d42932b 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zone_operations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zone_operations.py @@ -987,6 +987,42 @@ def test_delete_rest(request_type): assert isinstance(response, compute.DeleteZoneOperationResponse) +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_rest_required_fields(request_type=compute.DeleteZoneOperationRequest): transport_class = transports.ZoneOperationsRestTransport @@ -1319,6 +1355,42 @@ def test_get_rest(request_type): assert response.zone == "zone_value" +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetZoneOperationRequest): transport_class = transports.ZoneOperationsRestTransport @@ -1613,6 +1685,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListZoneOperationsRequest): transport_class = transports.ZoneOperationsRestTransport @@ -2007,6 +2115,42 @@ def test_wait_rest(request_type): assert response.zone == "zone_value" +def test_wait_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.wait in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.wait] = mock_rpc + + request = {} + client.wait(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.wait(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_wait_rest_required_fields(request_type=compute.WaitZoneOperationRequest): transport_class = transports.ZoneOperationsRestTransport diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zones.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zones.py index 98876d550c7e..70b202186253 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zones.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zones.py @@ -958,6 +958,42 @@ def test_get_rest(request_type): assert response.supports_pzs is True +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ZonesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_rest_required_fields(request_type=compute.GetZoneRequest): transport_class = transports.ZonesRestTransport @@ -1236,6 +1272,42 @@ def test_list_rest(request_type): assert response.self_link == "self_link_value" +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ZonesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_rest_required_fields(request_type=compute.ListZonesRequest): transport_class = transports.ZonesRestTransport diff --git a/scripts/client-post-processing/doc-formatting.yaml b/scripts/client-post-processing/doc-formatting.yaml index 6e9548bc8ba4..94fb923ef181 100644 --- a/scripts/client-post-processing/doc-formatting.yaml +++ b/scripts/client-post-processing/doc-formatting.yaml @@ -23,12 +23,6 @@ replacements: \ { after: " ::\n\n {\n" count: 2 - - paths: [ - packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py, - ] - before: /\[a-zA-Z0-9-_\]/ - after: "`[a-zA-Z0-9-_]`" - count: 1 - paths: [ packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py, ]