diff --git a/packages/google-cloud-bigquery-datatransfer/protos/protos.js b/packages/google-cloud-bigquery-datatransfer/protos/protos.js index 6fe88cef7d8..61a6a6adf2b 100644 --- a/packages/google-cloud-bigquery-datatransfer/protos/protos.js +++ b/packages/google-cloud-bigquery-datatransfer/protos/protos.js @@ -765,39 +765,39 @@ DataSourceParameter.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.paramId != null && message.hasOwnProperty("paramId")) + if (message.paramId != null && Object.hasOwnProperty.call(message, "paramId")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.paramId); - if (message.displayName != null && message.hasOwnProperty("displayName")) + if (message.displayName != null && Object.hasOwnProperty.call(message, "displayName")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.displayName); - if (message.description != null && message.hasOwnProperty("description")) + if (message.description != null && Object.hasOwnProperty.call(message, "description")) writer.uint32(/* id 3, wireType 2 =*/26).string(message.description); - if (message.type != null && message.hasOwnProperty("type")) + if (message.type != null && Object.hasOwnProperty.call(message, "type")) writer.uint32(/* id 4, wireType 0 =*/32).int32(message.type); - if (message.required != null && message.hasOwnProperty("required")) + if (message.required != null && Object.hasOwnProperty.call(message, "required")) writer.uint32(/* id 5, wireType 0 =*/40).bool(message.required); - if (message.repeated != null && message.hasOwnProperty("repeated")) + if (message.repeated != null && Object.hasOwnProperty.call(message, "repeated")) writer.uint32(/* id 6, wireType 0 =*/48).bool(message.repeated); - if (message.validationRegex != null && message.hasOwnProperty("validationRegex")) + if (message.validationRegex != null && Object.hasOwnProperty.call(message, "validationRegex")) writer.uint32(/* id 7, wireType 2 
=*/58).string(message.validationRegex); if (message.allowedValues != null && message.allowedValues.length) for (var i = 0; i < message.allowedValues.length; ++i) writer.uint32(/* id 8, wireType 2 =*/66).string(message.allowedValues[i]); - if (message.minValue != null && message.hasOwnProperty("minValue")) + if (message.minValue != null && Object.hasOwnProperty.call(message, "minValue")) $root.google.protobuf.DoubleValue.encode(message.minValue, writer.uint32(/* id 9, wireType 2 =*/74).fork()).ldelim(); - if (message.maxValue != null && message.hasOwnProperty("maxValue")) + if (message.maxValue != null && Object.hasOwnProperty.call(message, "maxValue")) $root.google.protobuf.DoubleValue.encode(message.maxValue, writer.uint32(/* id 10, wireType 2 =*/82).fork()).ldelim(); if (message.fields != null && message.fields.length) for (var i = 0; i < message.fields.length; ++i) $root.google.cloud.bigquery.datatransfer.v1.DataSourceParameter.encode(message.fields[i], writer.uint32(/* id 11, wireType 2 =*/90).fork()).ldelim(); - if (message.validationDescription != null && message.hasOwnProperty("validationDescription")) + if (message.validationDescription != null && Object.hasOwnProperty.call(message, "validationDescription")) writer.uint32(/* id 12, wireType 2 =*/98).string(message.validationDescription); - if (message.validationHelpUrl != null && message.hasOwnProperty("validationHelpUrl")) + if (message.validationHelpUrl != null && Object.hasOwnProperty.call(message, "validationHelpUrl")) writer.uint32(/* id 13, wireType 2 =*/106).string(message.validationHelpUrl); - if (message.immutable != null && message.hasOwnProperty("immutable")) + if (message.immutable != null && Object.hasOwnProperty.call(message, "immutable")) writer.uint32(/* id 14, wireType 0 =*/112).bool(message.immutable); - if (message.recurse != null && message.hasOwnProperty("recurse")) + if (message.recurse != null && Object.hasOwnProperty.call(message, "recurse")) writer.uint32(/* id 15, wireType 0 
=*/120).bool(message.recurse); - if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) writer.uint32(/* id 20, wireType 0 =*/160).bool(message.deprecated); return writer; }; @@ -1177,7 +1177,7 @@ /** * Type enum. * @name google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type - * @enum {string} + * @enum {number} * @property {number} TYPE_UNSPECIFIED=0 TYPE_UNSPECIFIED value * @property {number} STRING=1 STRING value * @property {number} INTEGER=2 INTEGER value @@ -1412,43 +1412,43 @@ DataSource.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.dataSourceId != null && message.hasOwnProperty("dataSourceId")) + if (message.dataSourceId != null && Object.hasOwnProperty.call(message, "dataSourceId")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.dataSourceId); - if (message.displayName != null && message.hasOwnProperty("displayName")) + if (message.displayName != null && Object.hasOwnProperty.call(message, "displayName")) writer.uint32(/* id 3, wireType 2 =*/26).string(message.displayName); - if (message.description != null && message.hasOwnProperty("description")) + if (message.description != null && Object.hasOwnProperty.call(message, "description")) writer.uint32(/* id 4, wireType 2 =*/34).string(message.description); - if (message.clientId != null && message.hasOwnProperty("clientId")) + if (message.clientId != null && Object.hasOwnProperty.call(message, "clientId")) writer.uint32(/* id 5, wireType 2 =*/42).string(message.clientId); if (message.scopes != null && message.scopes.length) for (var i = 0; i < message.scopes.length; ++i) writer.uint32(/* id 6, wireType 2 =*/50).string(message.scopes[i]); - 
if (message.transferType != null && message.hasOwnProperty("transferType")) + if (message.transferType != null && Object.hasOwnProperty.call(message, "transferType")) writer.uint32(/* id 7, wireType 0 =*/56).int32(message.transferType); - if (message.supportsMultipleTransfers != null && message.hasOwnProperty("supportsMultipleTransfers")) + if (message.supportsMultipleTransfers != null && Object.hasOwnProperty.call(message, "supportsMultipleTransfers")) writer.uint32(/* id 8, wireType 0 =*/64).bool(message.supportsMultipleTransfers); - if (message.updateDeadlineSeconds != null && message.hasOwnProperty("updateDeadlineSeconds")) + if (message.updateDeadlineSeconds != null && Object.hasOwnProperty.call(message, "updateDeadlineSeconds")) writer.uint32(/* id 9, wireType 0 =*/72).int32(message.updateDeadlineSeconds); - if (message.defaultSchedule != null && message.hasOwnProperty("defaultSchedule")) + if (message.defaultSchedule != null && Object.hasOwnProperty.call(message, "defaultSchedule")) writer.uint32(/* id 10, wireType 2 =*/82).string(message.defaultSchedule); - if (message.supportsCustomSchedule != null && message.hasOwnProperty("supportsCustomSchedule")) + if (message.supportsCustomSchedule != null && Object.hasOwnProperty.call(message, "supportsCustomSchedule")) writer.uint32(/* id 11, wireType 0 =*/88).bool(message.supportsCustomSchedule); if (message.parameters != null && message.parameters.length) for (var i = 0; i < message.parameters.length; ++i) $root.google.cloud.bigquery.datatransfer.v1.DataSourceParameter.encode(message.parameters[i], writer.uint32(/* id 12, wireType 2 =*/98).fork()).ldelim(); - if (message.helpUrl != null && message.hasOwnProperty("helpUrl")) + if (message.helpUrl != null && Object.hasOwnProperty.call(message, "helpUrl")) writer.uint32(/* id 13, wireType 2 =*/106).string(message.helpUrl); - if (message.authorizationType != null && message.hasOwnProperty("authorizationType")) + if (message.authorizationType != null && 
Object.hasOwnProperty.call(message, "authorizationType")) writer.uint32(/* id 14, wireType 0 =*/112).int32(message.authorizationType); - if (message.dataRefreshType != null && message.hasOwnProperty("dataRefreshType")) + if (message.dataRefreshType != null && Object.hasOwnProperty.call(message, "dataRefreshType")) writer.uint32(/* id 15, wireType 0 =*/120).int32(message.dataRefreshType); - if (message.defaultDataRefreshWindowDays != null && message.hasOwnProperty("defaultDataRefreshWindowDays")) + if (message.defaultDataRefreshWindowDays != null && Object.hasOwnProperty.call(message, "defaultDataRefreshWindowDays")) writer.uint32(/* id 16, wireType 0 =*/128).int32(message.defaultDataRefreshWindowDays); - if (message.manualRunsDisabled != null && message.hasOwnProperty("manualRunsDisabled")) + if (message.manualRunsDisabled != null && Object.hasOwnProperty.call(message, "manualRunsDisabled")) writer.uint32(/* id 17, wireType 0 =*/136).bool(message.manualRunsDisabled); - if (message.minimumScheduleInterval != null && message.hasOwnProperty("minimumScheduleInterval")) + if (message.minimumScheduleInterval != null && Object.hasOwnProperty.call(message, "minimumScheduleInterval")) $root.google.protobuf.Duration.encode(message.minimumScheduleInterval, writer.uint32(/* id 18, wireType 2 =*/146).fork()).ldelim(); return writer; }; @@ -1866,7 +1866,7 @@ /** * AuthorizationType enum. * @name google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType - * @enum {string} + * @enum {number} * @property {number} AUTHORIZATION_TYPE_UNSPECIFIED=0 AUTHORIZATION_TYPE_UNSPECIFIED value * @property {number} AUTHORIZATION_CODE=1 AUTHORIZATION_CODE value * @property {number} GOOGLE_PLUS_AUTHORIZATION_CODE=2 GOOGLE_PLUS_AUTHORIZATION_CODE value @@ -1884,7 +1884,7 @@ /** * DataRefreshType enum. 
* @name google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType - * @enum {string} + * @enum {number} * @property {number} DATA_REFRESH_TYPE_UNSPECIFIED=0 DATA_REFRESH_TYPE_UNSPECIFIED value * @property {number} SLIDING_WINDOW=1 SLIDING_WINDOW value * @property {number} CUSTOM_SLIDING_WINDOW=2 CUSTOM_SLIDING_WINDOW value @@ -1956,7 +1956,7 @@ GetDataSourceRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); return writer; }; @@ -2161,11 +2161,11 @@ ListDataSourcesRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.parent != null && message.hasOwnProperty("parent")) + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); - if (message.pageToken != null && message.hasOwnProperty("pageToken")) + if (message.pageToken != null && Object.hasOwnProperty.call(message, "pageToken")) writer.uint32(/* id 3, wireType 2 =*/26).string(message.pageToken); - if (message.pageSize != null && message.hasOwnProperty("pageSize")) + if (message.pageSize != null && Object.hasOwnProperty.call(message, "pageSize")) writer.uint32(/* id 4, wireType 0 =*/32).int32(message.pageSize); return writer; }; @@ -2388,7 +2388,7 @@ if (message.dataSources != null && message.dataSources.length) for (var i = 0; i < message.dataSources.length; ++i) $root.google.cloud.bigquery.datatransfer.v1.DataSource.encode(message.dataSources[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.nextPageToken != null && message.hasOwnProperty("nextPageToken")) + if (message.nextPageToken != null && Object.hasOwnProperty.call(message, "nextPageToken")) writer.uint32(/* id 2, wireType 2 
=*/18).string(message.nextPageToken); return writer; }; @@ -2642,15 +2642,15 @@ CreateTransferConfigRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.parent != null && message.hasOwnProperty("parent")) + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); - if (message.transferConfig != null && message.hasOwnProperty("transferConfig")) + if (message.transferConfig != null && Object.hasOwnProperty.call(message, "transferConfig")) $root.google.cloud.bigquery.datatransfer.v1.TransferConfig.encode(message.transferConfig, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.authorizationCode != null && message.hasOwnProperty("authorizationCode")) + if (message.authorizationCode != null && Object.hasOwnProperty.call(message, "authorizationCode")) writer.uint32(/* id 3, wireType 2 =*/26).string(message.authorizationCode); - if (message.versionInfo != null && message.hasOwnProperty("versionInfo")) + if (message.versionInfo != null && Object.hasOwnProperty.call(message, "versionInfo")) writer.uint32(/* id 5, wireType 2 =*/42).string(message.versionInfo); - if (message.serviceAccountName != null && message.hasOwnProperty("serviceAccountName")) + if (message.serviceAccountName != null && Object.hasOwnProperty.call(message, "serviceAccountName")) writer.uint32(/* id 6, wireType 2 =*/50).string(message.serviceAccountName); return writer; }; @@ -2923,15 +2923,15 @@ UpdateTransferConfigRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.transferConfig != null && message.hasOwnProperty("transferConfig")) + if (message.transferConfig != null && Object.hasOwnProperty.call(message, "transferConfig")) $root.google.cloud.bigquery.datatransfer.v1.TransferConfig.encode(message.transferConfig, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if 
(message.authorizationCode != null && message.hasOwnProperty("authorizationCode")) + if (message.authorizationCode != null && Object.hasOwnProperty.call(message, "authorizationCode")) writer.uint32(/* id 3, wireType 2 =*/26).string(message.authorizationCode); - if (message.updateMask != null && message.hasOwnProperty("updateMask")) + if (message.updateMask != null && Object.hasOwnProperty.call(message, "updateMask")) $root.google.protobuf.FieldMask.encode(message.updateMask, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.versionInfo != null && message.hasOwnProperty("versionInfo")) + if (message.versionInfo != null && Object.hasOwnProperty.call(message, "versionInfo")) writer.uint32(/* id 5, wireType 2 =*/42).string(message.versionInfo); - if (message.serviceAccountName != null && message.hasOwnProperty("serviceAccountName")) + if (message.serviceAccountName != null && Object.hasOwnProperty.call(message, "serviceAccountName")) writer.uint32(/* id 6, wireType 2 =*/50).string(message.serviceAccountName); return writer; }; @@ -3173,7 +3173,7 @@ GetTransferConfigRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); return writer; }; @@ -3360,7 +3360,7 @@ DeleteTransferConfigRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); return writer; }; @@ -3547,7 +3547,7 @@ GetTransferRunRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && 
Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); return writer; }; @@ -3734,7 +3734,7 @@ DeleteTransferRunRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); return writer; }; @@ -3949,14 +3949,14 @@ ListTransferConfigsRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.parent != null && message.hasOwnProperty("parent")) + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); if (message.dataSourceIds != null && message.dataSourceIds.length) for (var i = 0; i < message.dataSourceIds.length; ++i) writer.uint32(/* id 2, wireType 2 =*/18).string(message.dataSourceIds[i]); - if (message.pageToken != null && message.hasOwnProperty("pageToken")) + if (message.pageToken != null && Object.hasOwnProperty.call(message, "pageToken")) writer.uint32(/* id 3, wireType 2 =*/26).string(message.pageToken); - if (message.pageSize != null && message.hasOwnProperty("pageSize")) + if (message.pageSize != null && Object.hasOwnProperty.call(message, "pageSize")) writer.uint32(/* id 4, wireType 0 =*/32).int32(message.pageSize); return writer; }; @@ -4205,7 +4205,7 @@ if (message.transferConfigs != null && message.transferConfigs.length) for (var i = 0; i < message.transferConfigs.length; ++i) $root.google.cloud.bigquery.datatransfer.v1.TransferConfig.encode(message.transferConfigs[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.nextPageToken != null && message.hasOwnProperty("nextPageToken")) + if (message.nextPageToken != null && Object.hasOwnProperty.call(message, "nextPageToken")) writer.uint32(/* id 2, wireType 2 
=*/18).string(message.nextPageToken); return writer; }; @@ -4460,7 +4460,7 @@ ListTransferRunsRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.parent != null && message.hasOwnProperty("parent")) + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); if (message.states != null && message.states.length) { writer.uint32(/* id 2, wireType 2 =*/18).fork(); @@ -4468,11 +4468,11 @@ writer.int32(message.states[i]); writer.ldelim(); } - if (message.pageToken != null && message.hasOwnProperty("pageToken")) + if (message.pageToken != null && Object.hasOwnProperty.call(message, "pageToken")) writer.uint32(/* id 3, wireType 2 =*/26).string(message.pageToken); - if (message.pageSize != null && message.hasOwnProperty("pageSize")) + if (message.pageSize != null && Object.hasOwnProperty.call(message, "pageSize")) writer.uint32(/* id 4, wireType 0 =*/32).int32(message.pageSize); - if (message.runAttempt != null && message.hasOwnProperty("runAttempt")) + if (message.runAttempt != null && Object.hasOwnProperty.call(message, "runAttempt")) writer.uint32(/* id 5, wireType 0 =*/40).int32(message.runAttempt); return writer; }; @@ -4716,7 +4716,7 @@ /** * RunAttempt enum. 
* @name google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt - * @enum {string} + * @enum {number} * @property {number} RUN_ATTEMPT_UNSPECIFIED=0 RUN_ATTEMPT_UNSPECIFIED value * @property {number} LATEST=1 LATEST value */ @@ -4799,7 +4799,7 @@ if (message.transferRuns != null && message.transferRuns.length) for (var i = 0; i < message.transferRuns.length; ++i) $root.google.cloud.bigquery.datatransfer.v1.TransferRun.encode(message.transferRuns[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.nextPageToken != null && message.hasOwnProperty("nextPageToken")) + if (message.nextPageToken != null && Object.hasOwnProperty.call(message, "nextPageToken")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.nextPageToken); return writer; }; @@ -5045,11 +5045,11 @@ ListTransferLogsRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.parent != null && message.hasOwnProperty("parent")) + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); - if (message.pageToken != null && message.hasOwnProperty("pageToken")) + if (message.pageToken != null && Object.hasOwnProperty.call(message, "pageToken")) writer.uint32(/* id 4, wireType 2 =*/34).string(message.pageToken); - if (message.pageSize != null && message.hasOwnProperty("pageSize")) + if (message.pageSize != null && Object.hasOwnProperty.call(message, "pageSize")) writer.uint32(/* id 5, wireType 0 =*/40).int32(message.pageSize); if (message.messageTypes != null && message.messageTypes.length) { writer.uint32(/* id 6, wireType 2 =*/50).fork(); @@ -5334,7 +5334,7 @@ if (message.transferMessages != null && message.transferMessages.length) for (var i = 0; i < message.transferMessages.length; ++i) $root.google.cloud.bigquery.datatransfer.v1.TransferMessage.encode(message.transferMessages[i], writer.uint32(/* id 1, wireType 2 
=*/10).fork()).ldelim(); - if (message.nextPageToken != null && message.hasOwnProperty("nextPageToken")) + if (message.nextPageToken != null && Object.hasOwnProperty.call(message, "nextPageToken")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.nextPageToken); return writer; }; @@ -5552,7 +5552,7 @@ CheckValidCredsRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); return writer; }; @@ -5739,7 +5739,7 @@ CheckValidCredsResponse.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.hasValidCreds != null && message.hasOwnProperty("hasValidCreds")) + if (message.hasValidCreds != null && Object.hasOwnProperty.call(message, "hasValidCreds")) writer.uint32(/* id 1, wireType 0 =*/8).bool(message.hasValidCreds); return writer; }; @@ -5944,11 +5944,11 @@ ScheduleTransferRunsRequest.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.parent != null && message.hasOwnProperty("parent")) + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); - if (message.startTime != null && message.hasOwnProperty("startTime")) + if (message.startTime != null && Object.hasOwnProperty.call(message, "startTime")) $root.google.protobuf.Timestamp.encode(message.startTime, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.endTime != null && message.hasOwnProperty("endTime")) + if (message.endTime != null && Object.hasOwnProperty.call(message, "endTime")) $root.google.protobuf.Timestamp.encode(message.endTime, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); return writer; }; @@ -6408,11 +6408,11 @@ StartManualTransferRunsRequest.encode = function 
encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.parent != null && message.hasOwnProperty("parent")) + if (message.parent != null && Object.hasOwnProperty.call(message, "parent")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.parent); - if (message.requestedTimeRange != null && message.hasOwnProperty("requestedTimeRange")) + if (message.requestedTimeRange != null && Object.hasOwnProperty.call(message, "requestedTimeRange")) $root.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRange.encode(message.requestedTimeRange, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); - if (message.requestedRunTime != null && message.hasOwnProperty("requestedRunTime")) + if (message.requestedRunTime != null && Object.hasOwnProperty.call(message, "requestedRunTime")) $root.google.protobuf.Timestamp.encode(message.requestedRunTime, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); return writer; }; @@ -6650,9 +6650,9 @@ TimeRange.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.startTime != null && message.hasOwnProperty("startTime")) + if (message.startTime != null && Object.hasOwnProperty.call(message, "startTime")) $root.google.protobuf.Timestamp.encode(message.startTime, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.endTime != null && message.hasOwnProperty("endTime")) + if (message.endTime != null && Object.hasOwnProperty.call(message, "endTime")) $root.google.protobuf.Timestamp.encode(message.endTime, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); return writer; }; @@ -7019,7 +7019,7 @@ /** * TransferType enum. 
* @name google.cloud.bigquery.datatransfer.v1.TransferType - * @enum {string} + * @enum {number} * @property {number} TRANSFER_TYPE_UNSPECIFIED=0 TRANSFER_TYPE_UNSPECIFIED value * @property {number} BATCH=1 BATCH value * @property {number} STREAMING=2 STREAMING value @@ -7035,7 +7035,7 @@ /** * TransferState enum. * @name google.cloud.bigquery.datatransfer.v1.TransferState - * @enum {string} + * @enum {number} * @property {number} TRANSFER_STATE_UNSPECIFIED=0 TRANSFER_STATE_UNSPECIFIED value * @property {number} PENDING=2 PENDING value * @property {number} RUNNING=3 RUNNING value @@ -7110,7 +7110,7 @@ EmailPreferences.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.enableFailureEmail != null && message.hasOwnProperty("enableFailureEmail")) + if (message.enableFailureEmail != null && Object.hasOwnProperty.call(message, "enableFailureEmail")) writer.uint32(/* id 1, wireType 0 =*/8).bool(message.enableFailureEmail); return writer; }; @@ -7315,11 +7315,11 @@ ScheduleOptions.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.startTime != null && message.hasOwnProperty("startTime")) + if (message.startTime != null && Object.hasOwnProperty.call(message, "startTime")) $root.google.protobuf.Timestamp.encode(message.startTime, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.endTime != null && message.hasOwnProperty("endTime")) + if (message.endTime != null && Object.hasOwnProperty.call(message, "endTime")) $root.google.protobuf.Timestamp.encode(message.endTime, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.disableAutoScheduling != null && message.hasOwnProperty("disableAutoScheduling")) + if (message.disableAutoScheduling != null && Object.hasOwnProperty.call(message, "disableAutoScheduling")) writer.uint32(/* id 3, wireType 0 =*/24).bool(message.disableAutoScheduling); return writer; }; @@ -7688,37 +7688,37 @@ 
TransferConfig.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.destinationDatasetId != null && message.hasOwnProperty("destinationDatasetId")) + if (message.destinationDatasetId != null && Object.hasOwnProperty.call(message, "destinationDatasetId")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.destinationDatasetId); - if (message.displayName != null && message.hasOwnProperty("displayName")) + if (message.displayName != null && Object.hasOwnProperty.call(message, "displayName")) writer.uint32(/* id 3, wireType 2 =*/26).string(message.displayName); - if (message.updateTime != null && message.hasOwnProperty("updateTime")) + if (message.updateTime != null && Object.hasOwnProperty.call(message, "updateTime")) $root.google.protobuf.Timestamp.encode(message.updateTime, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.dataSourceId != null && message.hasOwnProperty("dataSourceId")) + if (message.dataSourceId != null && Object.hasOwnProperty.call(message, "dataSourceId")) writer.uint32(/* id 5, wireType 2 =*/42).string(message.dataSourceId); - if (message.schedule != null && message.hasOwnProperty("schedule")) + if (message.schedule != null && Object.hasOwnProperty.call(message, "schedule")) writer.uint32(/* id 7, wireType 2 =*/58).string(message.schedule); - if (message.nextRunTime != null && message.hasOwnProperty("nextRunTime")) + if (message.nextRunTime != null && Object.hasOwnProperty.call(message, "nextRunTime")) $root.google.protobuf.Timestamp.encode(message.nextRunTime, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); - if (message.params != null && message.hasOwnProperty("params")) + if (message.params != null && Object.hasOwnProperty.call(message, "params")) 
$root.google.protobuf.Struct.encode(message.params, writer.uint32(/* id 9, wireType 2 =*/74).fork()).ldelim(); - if (message.state != null && message.hasOwnProperty("state")) + if (message.state != null && Object.hasOwnProperty.call(message, "state")) writer.uint32(/* id 10, wireType 0 =*/80).int32(message.state); - if (message.userId != null && message.hasOwnProperty("userId")) + if (message.userId != null && Object.hasOwnProperty.call(message, "userId")) writer.uint32(/* id 11, wireType 0 =*/88).int64(message.userId); - if (message.dataRefreshWindowDays != null && message.hasOwnProperty("dataRefreshWindowDays")) + if (message.dataRefreshWindowDays != null && Object.hasOwnProperty.call(message, "dataRefreshWindowDays")) writer.uint32(/* id 12, wireType 0 =*/96).int32(message.dataRefreshWindowDays); - if (message.disabled != null && message.hasOwnProperty("disabled")) + if (message.disabled != null && Object.hasOwnProperty.call(message, "disabled")) writer.uint32(/* id 13, wireType 0 =*/104).bool(message.disabled); - if (message.datasetRegion != null && message.hasOwnProperty("datasetRegion")) + if (message.datasetRegion != null && Object.hasOwnProperty.call(message, "datasetRegion")) writer.uint32(/* id 14, wireType 2 =*/114).string(message.datasetRegion); - if (message.notificationPubsubTopic != null && message.hasOwnProperty("notificationPubsubTopic")) + if (message.notificationPubsubTopic != null && Object.hasOwnProperty.call(message, "notificationPubsubTopic")) writer.uint32(/* id 15, wireType 2 =*/122).string(message.notificationPubsubTopic); - if (message.emailPreferences != null && message.hasOwnProperty("emailPreferences")) + if (message.emailPreferences != null && Object.hasOwnProperty.call(message, "emailPreferences")) $root.google.cloud.bigquery.datatransfer.v1.EmailPreferences.encode(message.emailPreferences, writer.uint32(/* id 18, wireType 2 =*/146).fork()).ldelim(); - if (message.scheduleOptions != null && message.hasOwnProperty("scheduleOptions")) 
+ if (message.scheduleOptions != null && Object.hasOwnProperty.call(message, "scheduleOptions")) $root.google.cloud.bigquery.datatransfer.v1.ScheduleOptions.encode(message.scheduleOptions, writer.uint32(/* id 24, wireType 2 =*/194).fork()).ldelim(); return writer; }; @@ -8288,35 +8288,35 @@ TransferRun.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.destinationDatasetId != null && message.hasOwnProperty("destinationDatasetId")) + if (message.destinationDatasetId != null && Object.hasOwnProperty.call(message, "destinationDatasetId")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.destinationDatasetId); - if (message.scheduleTime != null && message.hasOwnProperty("scheduleTime")) + if (message.scheduleTime != null && Object.hasOwnProperty.call(message, "scheduleTime")) $root.google.protobuf.Timestamp.encode(message.scheduleTime, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); - if (message.startTime != null && message.hasOwnProperty("startTime")) + if (message.startTime != null && Object.hasOwnProperty.call(message, "startTime")) $root.google.protobuf.Timestamp.encode(message.startTime, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.endTime != null && message.hasOwnProperty("endTime")) + if (message.endTime != null && Object.hasOwnProperty.call(message, "endTime")) $root.google.protobuf.Timestamp.encode(message.endTime, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); - if (message.updateTime != null && message.hasOwnProperty("updateTime")) + if (message.updateTime != null && Object.hasOwnProperty.call(message, "updateTime")) $root.google.protobuf.Timestamp.encode(message.updateTime, writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); - if 
(message.dataSourceId != null && message.hasOwnProperty("dataSourceId")) + if (message.dataSourceId != null && Object.hasOwnProperty.call(message, "dataSourceId")) writer.uint32(/* id 7, wireType 2 =*/58).string(message.dataSourceId); - if (message.state != null && message.hasOwnProperty("state")) + if (message.state != null && Object.hasOwnProperty.call(message, "state")) writer.uint32(/* id 8, wireType 0 =*/64).int32(message.state); - if (message.params != null && message.hasOwnProperty("params")) + if (message.params != null && Object.hasOwnProperty.call(message, "params")) $root.google.protobuf.Struct.encode(message.params, writer.uint32(/* id 9, wireType 2 =*/74).fork()).ldelim(); - if (message.runTime != null && message.hasOwnProperty("runTime")) + if (message.runTime != null && Object.hasOwnProperty.call(message, "runTime")) $root.google.protobuf.Timestamp.encode(message.runTime, writer.uint32(/* id 10, wireType 2 =*/82).fork()).ldelim(); - if (message.userId != null && message.hasOwnProperty("userId")) + if (message.userId != null && Object.hasOwnProperty.call(message, "userId")) writer.uint32(/* id 11, wireType 0 =*/88).int64(message.userId); - if (message.schedule != null && message.hasOwnProperty("schedule")) + if (message.schedule != null && Object.hasOwnProperty.call(message, "schedule")) writer.uint32(/* id 12, wireType 2 =*/98).string(message.schedule); - if (message.errorStatus != null && message.hasOwnProperty("errorStatus")) + if (message.errorStatus != null && Object.hasOwnProperty.call(message, "errorStatus")) $root.google.rpc.Status.encode(message.errorStatus, writer.uint32(/* id 21, wireType 2 =*/170).fork()).ldelim(); - if (message.notificationPubsubTopic != null && message.hasOwnProperty("notificationPubsubTopic")) + if (message.notificationPubsubTopic != null && Object.hasOwnProperty.call(message, "notificationPubsubTopic")) writer.uint32(/* id 23, wireType 2 =*/186).string(message.notificationPubsubTopic); - if (message.emailPreferences != 
null && message.hasOwnProperty("emailPreferences")) + if (message.emailPreferences != null && Object.hasOwnProperty.call(message, "emailPreferences")) $root.google.cloud.bigquery.datatransfer.v1.EmailPreferences.encode(message.emailPreferences, writer.uint32(/* id 25, wireType 2 =*/202).fork()).ldelim(); return writer; }; @@ -8768,11 +8768,11 @@ TransferMessage.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.messageTime != null && message.hasOwnProperty("messageTime")) + if (message.messageTime != null && Object.hasOwnProperty.call(message, "messageTime")) $root.google.protobuf.Timestamp.encode(message.messageTime, writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.severity != null && message.hasOwnProperty("severity")) + if (message.severity != null && Object.hasOwnProperty.call(message, "severity")) writer.uint32(/* id 2, wireType 0 =*/16).int32(message.severity); - if (message.messageText != null && message.hasOwnProperty("messageText")) + if (message.messageText != null && Object.hasOwnProperty.call(message, "messageText")) writer.uint32(/* id 3, wireType 2 =*/26).string(message.messageText); return writer; }; @@ -8954,7 +8954,7 @@ /** * MessageSeverity enum. 
* @name google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity - * @enum {string} + * @enum {number} * @property {number} MESSAGE_SEVERITY_UNSPECIFIED=0 MESSAGE_SEVERITY_UNSPECIFIED value * @property {number} INFO=1 INFO value * @property {number} WARNING=2 WARNING value @@ -9062,7 +9062,7 @@ if (message.rules != null && message.rules.length) for (var i = 0; i < message.rules.length; ++i) $root.google.api.HttpRule.encode(message.rules[i], writer.uint32(/* id 1, wireType 2 =*/10).fork()).ldelim(); - if (message.fullyDecodeReservedExpansion != null && message.hasOwnProperty("fullyDecodeReservedExpansion")) + if (message.fullyDecodeReservedExpansion != null && Object.hasOwnProperty.call(message, "fullyDecodeReservedExpansion")) writer.uint32(/* id 2, wireType 0 =*/16).bool(message.fullyDecodeReservedExpansion); return writer; }; @@ -9376,26 +9376,26 @@ HttpRule.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.selector != null && message.hasOwnProperty("selector")) + if (message.selector != null && Object.hasOwnProperty.call(message, "selector")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.selector); - if (message.get != null && message.hasOwnProperty("get")) + if (message.get != null && Object.hasOwnProperty.call(message, "get")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.get); - if (message.put != null && message.hasOwnProperty("put")) + if (message.put != null && Object.hasOwnProperty.call(message, "put")) writer.uint32(/* id 3, wireType 2 =*/26).string(message.put); - if (message.post != null && message.hasOwnProperty("post")) + if (message.post != null && Object.hasOwnProperty.call(message, "post")) writer.uint32(/* id 4, wireType 2 =*/34).string(message.post); - if (message["delete"] != null && message.hasOwnProperty("delete")) + if (message["delete"] != null && Object.hasOwnProperty.call(message, "delete")) writer.uint32(/* id 5, wireType 2 =*/42).string(message["delete"]); 
- if (message.patch != null && message.hasOwnProperty("patch")) + if (message.patch != null && Object.hasOwnProperty.call(message, "patch")) writer.uint32(/* id 6, wireType 2 =*/50).string(message.patch); - if (message.body != null && message.hasOwnProperty("body")) + if (message.body != null && Object.hasOwnProperty.call(message, "body")) writer.uint32(/* id 7, wireType 2 =*/58).string(message.body); - if (message.custom != null && message.hasOwnProperty("custom")) + if (message.custom != null && Object.hasOwnProperty.call(message, "custom")) $root.google.api.CustomHttpPattern.encode(message.custom, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); if (message.additionalBindings != null && message.additionalBindings.length) for (var i = 0; i < message.additionalBindings.length; ++i) $root.google.api.HttpRule.encode(message.additionalBindings[i], writer.uint32(/* id 11, wireType 2 =*/90).fork()).ldelim(); - if (message.responseBody != null && message.hasOwnProperty("responseBody")) + if (message.responseBody != null && Object.hasOwnProperty.call(message, "responseBody")) writer.uint32(/* id 12, wireType 2 =*/98).string(message.responseBody); return writer; }; @@ -9752,9 +9752,9 @@ CustomHttpPattern.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.kind != null && message.hasOwnProperty("kind")) + if (message.kind != null && Object.hasOwnProperty.call(message, "kind")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.kind); - if (message.path != null && message.hasOwnProperty("path")) + if (message.path != null && Object.hasOwnProperty.call(message, "path")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.path); return writer; }; @@ -9900,7 +9900,7 @@ /** * FieldBehavior enum. 
* @name google.api.FieldBehavior - * @enum {string} + * @enum {number} * @property {number} FIELD_BEHAVIOR_UNSPECIFIED=0 FIELD_BEHAVIOR_UNSPECIFIED value * @property {number} OPTIONAL=1 OPTIONAL value * @property {number} REQUIRED=2 REQUIRED value @@ -10021,18 +10021,18 @@ ResourceDescriptor.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.type != null && message.hasOwnProperty("type")) + if (message.type != null && Object.hasOwnProperty.call(message, "type")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.type); if (message.pattern != null && message.pattern.length) for (var i = 0; i < message.pattern.length; ++i) writer.uint32(/* id 2, wireType 2 =*/18).string(message.pattern[i]); - if (message.nameField != null && message.hasOwnProperty("nameField")) + if (message.nameField != null && Object.hasOwnProperty.call(message, "nameField")) writer.uint32(/* id 3, wireType 2 =*/26).string(message.nameField); - if (message.history != null && message.hasOwnProperty("history")) + if (message.history != null && Object.hasOwnProperty.call(message, "history")) writer.uint32(/* id 4, wireType 0 =*/32).int32(message.history); - if (message.plural != null && message.hasOwnProperty("plural")) + if (message.plural != null && Object.hasOwnProperty.call(message, "plural")) writer.uint32(/* id 5, wireType 2 =*/42).string(message.plural); - if (message.singular != null && message.hasOwnProperty("singular")) + if (message.singular != null && Object.hasOwnProperty.call(message, "singular")) writer.uint32(/* id 6, wireType 2 =*/50).string(message.singular); return writer; }; @@ -10252,7 +10252,7 @@ /** * History enum. 
* @name google.api.ResourceDescriptor.History - * @enum {string} + * @enum {number} * @property {number} HISTORY_UNSPECIFIED=0 HISTORY_UNSPECIFIED value * @property {number} ORIGINALLY_SINGLE_PATTERN=1 ORIGINALLY_SINGLE_PATTERN value * @property {number} FUTURE_MULTI_PATTERN=2 FUTURE_MULTI_PATTERN value @@ -10333,9 +10333,9 @@ ResourceReference.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.type != null && message.hasOwnProperty("type")) + if (message.type != null && Object.hasOwnProperty.call(message, "type")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.type); - if (message.childType != null && message.hasOwnProperty("childType")) + if (message.childType != null && Object.hasOwnProperty.call(message, "childType")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.childType); return writer; }; @@ -10860,9 +10860,9 @@ FileDescriptorProto.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message["package"] != null && message.hasOwnProperty("package")) + if (message["package"] != null && Object.hasOwnProperty.call(message, "package")) writer.uint32(/* id 2, wireType 2 =*/18).string(message["package"]); if (message.dependency != null && message.dependency.length) for (var i = 0; i < message.dependency.length; ++i) @@ -10879,9 +10879,9 @@ if (message.extension != null && message.extension.length) for (var i = 0; i < message.extension.length; ++i) $root.google.protobuf.FieldDescriptorProto.encode(message.extension[i], writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); - if (message.options != null && message.hasOwnProperty("options")) + if (message.options != null && Object.hasOwnProperty.call(message, "options")) 
$root.google.protobuf.FileOptions.encode(message.options, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); - if (message.sourceCodeInfo != null && message.hasOwnProperty("sourceCodeInfo")) + if (message.sourceCodeInfo != null && Object.hasOwnProperty.call(message, "sourceCodeInfo")) $root.google.protobuf.SourceCodeInfo.encode(message.sourceCodeInfo, writer.uint32(/* id 9, wireType 2 =*/74).fork()).ldelim(); if (message.publicDependency != null && message.publicDependency.length) for (var i = 0; i < message.publicDependency.length; ++i) @@ -10889,7 +10889,7 @@ if (message.weakDependency != null && message.weakDependency.length) for (var i = 0; i < message.weakDependency.length; ++i) writer.uint32(/* id 11, wireType 0 =*/88).int32(message.weakDependency[i]); - if (message.syntax != null && message.hasOwnProperty("syntax")) + if (message.syntax != null && Object.hasOwnProperty.call(message, "syntax")) writer.uint32(/* id 12, wireType 2 =*/98).string(message.syntax); return writer; }; @@ -11427,7 +11427,7 @@ DescriptorProto.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); if (message.field != null && message.field.length) for (var i = 0; i < message.field.length; ++i) @@ -11444,7 +11444,7 @@ if (message.extension != null && message.extension.length) for (var i = 0; i < message.extension.length; ++i) $root.google.protobuf.FieldDescriptorProto.encode(message.extension[i], writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); - if (message.options != null && message.hasOwnProperty("options")) + if (message.options != null && Object.hasOwnProperty.call(message, "options")) $root.google.protobuf.MessageOptions.encode(message.options, writer.uint32(/* id 7, wireType 2 =*/58).fork()).ldelim(); if (message.oneofDecl != null && 
message.oneofDecl.length) for (var i = 0; i < message.oneofDecl.length; ++i) @@ -11909,11 +11909,11 @@ ExtensionRange.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.start != null && message.hasOwnProperty("start")) + if (message.start != null && Object.hasOwnProperty.call(message, "start")) writer.uint32(/* id 1, wireType 0 =*/8).int32(message.start); - if (message.end != null && message.hasOwnProperty("end")) + if (message.end != null && Object.hasOwnProperty.call(message, "end")) writer.uint32(/* id 2, wireType 0 =*/16).int32(message.end); - if (message.options != null && message.hasOwnProperty("options")) + if (message.options != null && Object.hasOwnProperty.call(message, "options")) $root.google.protobuf.ExtensionRangeOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); return writer; }; @@ -12137,9 +12137,9 @@ ReservedRange.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.start != null && message.hasOwnProperty("start")) + if (message.start != null && Object.hasOwnProperty.call(message, "start")) writer.uint32(/* id 1, wireType 0 =*/8).int32(message.start); - if (message.end != null && message.hasOwnProperty("end")) + if (message.end != null && Object.hasOwnProperty.call(message, "end")) writer.uint32(/* id 2, wireType 0 =*/16).int32(message.end); return writer; }; @@ -12630,25 +12630,25 @@ FieldDescriptorProto.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.extendee != null && message.hasOwnProperty("extendee")) + if (message.extendee != null && Object.hasOwnProperty.call(message, "extendee")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.extendee); - if 
(message.number != null && message.hasOwnProperty("number")) + if (message.number != null && Object.hasOwnProperty.call(message, "number")) writer.uint32(/* id 3, wireType 0 =*/24).int32(message.number); - if (message.label != null && message.hasOwnProperty("label")) + if (message.label != null && Object.hasOwnProperty.call(message, "label")) writer.uint32(/* id 4, wireType 0 =*/32).int32(message.label); - if (message.type != null && message.hasOwnProperty("type")) + if (message.type != null && Object.hasOwnProperty.call(message, "type")) writer.uint32(/* id 5, wireType 0 =*/40).int32(message.type); - if (message.typeName != null && message.hasOwnProperty("typeName")) + if (message.typeName != null && Object.hasOwnProperty.call(message, "typeName")) writer.uint32(/* id 6, wireType 2 =*/50).string(message.typeName); - if (message.defaultValue != null && message.hasOwnProperty("defaultValue")) + if (message.defaultValue != null && Object.hasOwnProperty.call(message, "defaultValue")) writer.uint32(/* id 7, wireType 2 =*/58).string(message.defaultValue); - if (message.options != null && message.hasOwnProperty("options")) + if (message.options != null && Object.hasOwnProperty.call(message, "options")) $root.google.protobuf.FieldOptions.encode(message.options, writer.uint32(/* id 8, wireType 2 =*/66).fork()).ldelim(); - if (message.oneofIndex != null && message.hasOwnProperty("oneofIndex")) + if (message.oneofIndex != null && Object.hasOwnProperty.call(message, "oneofIndex")) writer.uint32(/* id 9, wireType 0 =*/72).int32(message.oneofIndex); - if (message.jsonName != null && message.hasOwnProperty("jsonName")) + if (message.jsonName != null && Object.hasOwnProperty.call(message, "jsonName")) writer.uint32(/* id 10, wireType 2 =*/82).string(message.jsonName); return writer; }; @@ -12995,7 +12995,7 @@ /** * Type enum. 
* @name google.protobuf.FieldDescriptorProto.Type - * @enum {string} + * @enum {number} * @property {number} TYPE_DOUBLE=1 TYPE_DOUBLE value * @property {number} TYPE_FLOAT=2 TYPE_FLOAT value * @property {number} TYPE_INT64=3 TYPE_INT64 value @@ -13041,7 +13041,7 @@ /** * Label enum. * @name google.protobuf.FieldDescriptorProto.Label - * @enum {string} + * @enum {number} * @property {number} LABEL_OPTIONAL=1 LABEL_OPTIONAL value * @property {number} LABEL_REQUIRED=2 LABEL_REQUIRED value * @property {number} LABEL_REPEATED=3 LABEL_REPEATED value @@ -13122,9 +13122,9 @@ OneofDescriptorProto.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.options != null && message.hasOwnProperty("options")) + if (message.options != null && Object.hasOwnProperty.call(message, "options")) $root.google.protobuf.OneofOptions.encode(message.options, writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); return writer; }; @@ -13367,12 +13367,12 @@ EnumDescriptorProto.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); if (message.value != null && message.value.length) for (var i = 0; i < message.value.length; ++i) $root.google.protobuf.EnumValueDescriptorProto.encode(message.value[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.options != null && message.hasOwnProperty("options")) + if (message.options != null && Object.hasOwnProperty.call(message, "options")) $root.google.protobuf.EnumOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); if 
(message.reservedRange != null && message.reservedRange.length) for (var i = 0; i < message.reservedRange.length; ++i) @@ -13675,9 +13675,9 @@ EnumReservedRange.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.start != null && message.hasOwnProperty("start")) + if (message.start != null && Object.hasOwnProperty.call(message, "start")) writer.uint32(/* id 1, wireType 0 =*/8).int32(message.start); - if (message.end != null && message.hasOwnProperty("end")) + if (message.end != null && Object.hasOwnProperty.call(message, "end")) writer.uint32(/* id 2, wireType 0 =*/16).int32(message.end); return writer; }; @@ -13897,11 +13897,11 @@ EnumValueDescriptorProto.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.number != null && message.hasOwnProperty("number")) + if (message.number != null && Object.hasOwnProperty.call(message, "number")) writer.uint32(/* id 2, wireType 0 =*/16).int32(message.number); - if (message.options != null && message.hasOwnProperty("options")) + if (message.options != null && Object.hasOwnProperty.call(message, "options")) $root.google.protobuf.EnumValueOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); return writer; }; @@ -14135,12 +14135,12 @@ ServiceDescriptorProto.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); if (message.method != null && message.method.length) for (var i = 0; i < message.method.length; ++i) $root.google.protobuf.MethodDescriptorProto.encode(message.method[i], 
writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.options != null && message.hasOwnProperty("options")) + if (message.options != null && Object.hasOwnProperty.call(message, "options")) $root.google.protobuf.ServiceOptions.encode(message.options, writer.uint32(/* id 3, wireType 2 =*/26).fork()).ldelim(); return writer; }; @@ -14420,17 +14420,17 @@ MethodDescriptorProto.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.name != null && message.hasOwnProperty("name")) + if (message.name != null && Object.hasOwnProperty.call(message, "name")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.name); - if (message.inputType != null && message.hasOwnProperty("inputType")) + if (message.inputType != null && Object.hasOwnProperty.call(message, "inputType")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.inputType); - if (message.outputType != null && message.hasOwnProperty("outputType")) + if (message.outputType != null && Object.hasOwnProperty.call(message, "outputType")) writer.uint32(/* id 3, wireType 2 =*/26).string(message.outputType); - if (message.options != null && message.hasOwnProperty("options")) + if (message.options != null && Object.hasOwnProperty.call(message, "options")) $root.google.protobuf.MethodOptions.encode(message.options, writer.uint32(/* id 4, wireType 2 =*/34).fork()).ldelim(); - if (message.clientStreaming != null && message.hasOwnProperty("clientStreaming")) + if (message.clientStreaming != null && Object.hasOwnProperty.call(message, "clientStreaming")) writer.uint32(/* id 5, wireType 0 =*/40).bool(message.clientStreaming); - if (message.serverStreaming != null && message.hasOwnProperty("serverStreaming")) + if (message.serverStreaming != null && Object.hasOwnProperty.call(message, "serverStreaming")) writer.uint32(/* id 6, wireType 0 =*/48).bool(message.serverStreaming); return writer; }; @@ -14869,45 +14869,45 @@ FileOptions.encode = function encode(message, 
writer) { if (!writer) writer = $Writer.create(); - if (message.javaPackage != null && message.hasOwnProperty("javaPackage")) + if (message.javaPackage != null && Object.hasOwnProperty.call(message, "javaPackage")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.javaPackage); - if (message.javaOuterClassname != null && message.hasOwnProperty("javaOuterClassname")) + if (message.javaOuterClassname != null && Object.hasOwnProperty.call(message, "javaOuterClassname")) writer.uint32(/* id 8, wireType 2 =*/66).string(message.javaOuterClassname); - if (message.optimizeFor != null && message.hasOwnProperty("optimizeFor")) + if (message.optimizeFor != null && Object.hasOwnProperty.call(message, "optimizeFor")) writer.uint32(/* id 9, wireType 0 =*/72).int32(message.optimizeFor); - if (message.javaMultipleFiles != null && message.hasOwnProperty("javaMultipleFiles")) + if (message.javaMultipleFiles != null && Object.hasOwnProperty.call(message, "javaMultipleFiles")) writer.uint32(/* id 10, wireType 0 =*/80).bool(message.javaMultipleFiles); - if (message.goPackage != null && message.hasOwnProperty("goPackage")) + if (message.goPackage != null && Object.hasOwnProperty.call(message, "goPackage")) writer.uint32(/* id 11, wireType 2 =*/90).string(message.goPackage); - if (message.ccGenericServices != null && message.hasOwnProperty("ccGenericServices")) + if (message.ccGenericServices != null && Object.hasOwnProperty.call(message, "ccGenericServices")) writer.uint32(/* id 16, wireType 0 =*/128).bool(message.ccGenericServices); - if (message.javaGenericServices != null && message.hasOwnProperty("javaGenericServices")) + if (message.javaGenericServices != null && Object.hasOwnProperty.call(message, "javaGenericServices")) writer.uint32(/* id 17, wireType 0 =*/136).bool(message.javaGenericServices); - if (message.pyGenericServices != null && message.hasOwnProperty("pyGenericServices")) + if (message.pyGenericServices != null && Object.hasOwnProperty.call(message, 
"pyGenericServices")) writer.uint32(/* id 18, wireType 0 =*/144).bool(message.pyGenericServices); - if (message.javaGenerateEqualsAndHash != null && message.hasOwnProperty("javaGenerateEqualsAndHash")) + if (message.javaGenerateEqualsAndHash != null && Object.hasOwnProperty.call(message, "javaGenerateEqualsAndHash")) writer.uint32(/* id 20, wireType 0 =*/160).bool(message.javaGenerateEqualsAndHash); - if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) writer.uint32(/* id 23, wireType 0 =*/184).bool(message.deprecated); - if (message.javaStringCheckUtf8 != null && message.hasOwnProperty("javaStringCheckUtf8")) + if (message.javaStringCheckUtf8 != null && Object.hasOwnProperty.call(message, "javaStringCheckUtf8")) writer.uint32(/* id 27, wireType 0 =*/216).bool(message.javaStringCheckUtf8); - if (message.ccEnableArenas != null && message.hasOwnProperty("ccEnableArenas")) + if (message.ccEnableArenas != null && Object.hasOwnProperty.call(message, "ccEnableArenas")) writer.uint32(/* id 31, wireType 0 =*/248).bool(message.ccEnableArenas); - if (message.objcClassPrefix != null && message.hasOwnProperty("objcClassPrefix")) + if (message.objcClassPrefix != null && Object.hasOwnProperty.call(message, "objcClassPrefix")) writer.uint32(/* id 36, wireType 2 =*/290).string(message.objcClassPrefix); - if (message.csharpNamespace != null && message.hasOwnProperty("csharpNamespace")) + if (message.csharpNamespace != null && Object.hasOwnProperty.call(message, "csharpNamespace")) writer.uint32(/* id 37, wireType 2 =*/298).string(message.csharpNamespace); - if (message.swiftPrefix != null && message.hasOwnProperty("swiftPrefix")) + if (message.swiftPrefix != null && Object.hasOwnProperty.call(message, "swiftPrefix")) writer.uint32(/* id 39, wireType 2 =*/314).string(message.swiftPrefix); - if (message.phpClassPrefix != null && message.hasOwnProperty("phpClassPrefix")) + if 
(message.phpClassPrefix != null && Object.hasOwnProperty.call(message, "phpClassPrefix")) writer.uint32(/* id 40, wireType 2 =*/322).string(message.phpClassPrefix); - if (message.phpNamespace != null && message.hasOwnProperty("phpNamespace")) + if (message.phpNamespace != null && Object.hasOwnProperty.call(message, "phpNamespace")) writer.uint32(/* id 41, wireType 2 =*/330).string(message.phpNamespace); - if (message.phpGenericServices != null && message.hasOwnProperty("phpGenericServices")) + if (message.phpGenericServices != null && Object.hasOwnProperty.call(message, "phpGenericServices")) writer.uint32(/* id 42, wireType 0 =*/336).bool(message.phpGenericServices); - if (message.phpMetadataNamespace != null && message.hasOwnProperty("phpMetadataNamespace")) + if (message.phpMetadataNamespace != null && Object.hasOwnProperty.call(message, "phpMetadataNamespace")) writer.uint32(/* id 44, wireType 2 =*/354).string(message.phpMetadataNamespace); - if (message.rubyPackage != null && message.hasOwnProperty("rubyPackage")) + if (message.rubyPackage != null && Object.hasOwnProperty.call(message, "rubyPackage")) writer.uint32(/* id 45, wireType 2 =*/362).string(message.rubyPackage); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) @@ -15334,7 +15334,7 @@ /** * OptimizeMode enum. 
* @name google.protobuf.FileOptions.OptimizeMode - * @enum {string} + * @enum {number} * @property {number} SPEED=1 SPEED value * @property {number} CODE_SIZE=2 CODE_SIZE value * @property {number} LITE_RUNTIME=3 LITE_RUNTIME value @@ -15452,18 +15452,18 @@ MessageOptions.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.messageSetWireFormat != null && message.hasOwnProperty("messageSetWireFormat")) + if (message.messageSetWireFormat != null && Object.hasOwnProperty.call(message, "messageSetWireFormat")) writer.uint32(/* id 1, wireType 0 =*/8).bool(message.messageSetWireFormat); - if (message.noStandardDescriptorAccessor != null && message.hasOwnProperty("noStandardDescriptorAccessor")) + if (message.noStandardDescriptorAccessor != null && Object.hasOwnProperty.call(message, "noStandardDescriptorAccessor")) writer.uint32(/* id 2, wireType 0 =*/16).bool(message.noStandardDescriptorAccessor); - if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); - if (message.mapEntry != null && message.hasOwnProperty("mapEntry")) + if (message.mapEntry != null && Object.hasOwnProperty.call(message, "mapEntry")) writer.uint32(/* id 7, wireType 0 =*/56).bool(message.mapEntry); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); - if (message[".google.api.resource"] != null && message.hasOwnProperty(".google.api.resource")) + if (message[".google.api.resource"] != null && Object.hasOwnProperty.call(message, ".google.api.resource")) $root.google.api.ResourceDescriptor.encode(message[".google.api.resource"], writer.uint32(/* id 1053, wireType 
2 =*/8426).fork()).ldelim(); return writer; }; @@ -15805,17 +15805,17 @@ FieldOptions.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.ctype != null && message.hasOwnProperty("ctype")) + if (message.ctype != null && Object.hasOwnProperty.call(message, "ctype")) writer.uint32(/* id 1, wireType 0 =*/8).int32(message.ctype); - if (message.packed != null && message.hasOwnProperty("packed")) + if (message.packed != null && Object.hasOwnProperty.call(message, "packed")) writer.uint32(/* id 2, wireType 0 =*/16).bool(message.packed); - if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); - if (message.lazy != null && message.hasOwnProperty("lazy")) + if (message.lazy != null && Object.hasOwnProperty.call(message, "lazy")) writer.uint32(/* id 5, wireType 0 =*/40).bool(message.lazy); - if (message.jstype != null && message.hasOwnProperty("jstype")) + if (message.jstype != null && Object.hasOwnProperty.call(message, "jstype")) writer.uint32(/* id 6, wireType 0 =*/48).int32(message.jstype); - if (message.weak != null && message.hasOwnProperty("weak")) + if (message.weak != null && Object.hasOwnProperty.call(message, "weak")) writer.uint32(/* id 10, wireType 0 =*/80).bool(message.weak); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) @@ -15826,7 +15826,7 @@ writer.int32(message[".google.api.fieldBehavior"][i]); writer.ldelim(); } - if (message[".google.api.resourceReference"] != null && message.hasOwnProperty(".google.api.resourceReference")) + if (message[".google.api.resourceReference"] != null && Object.hasOwnProperty.call(message, ".google.api.resourceReference")) $root.google.api.ResourceReference.encode(message[".google.api.resourceReference"], writer.uint32(/* id 
1055, wireType 2 =*/8442).fork()).ldelim(); return writer; }; @@ -16162,7 +16162,7 @@ /** * CType enum. * @name google.protobuf.FieldOptions.CType - * @enum {string} + * @enum {number} * @property {number} STRING=0 STRING value * @property {number} CORD=1 CORD value * @property {number} STRING_PIECE=2 STRING_PIECE value @@ -16178,7 +16178,7 @@ /** * JSType enum. * @name google.protobuf.FieldOptions.JSType - * @enum {string} + * @enum {number} * @property {number} JS_NORMAL=0 JS_NORMAL value * @property {number} JS_STRING=1 JS_STRING value * @property {number} JS_NUMBER=2 JS_NUMBER value @@ -16477,9 +16477,9 @@ EnumOptions.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.allowAlias != null && message.hasOwnProperty("allowAlias")) + if (message.allowAlias != null && Object.hasOwnProperty.call(message, "allowAlias")) writer.uint32(/* id 2, wireType 0 =*/16).bool(message.allowAlias); - if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) writer.uint32(/* id 3, wireType 0 =*/24).bool(message.deprecated); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) @@ -16722,7 +16722,7 @@ EnumValueOptions.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) writer.uint32(/* id 1, wireType 0 =*/8).bool(message.deprecated); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) @@ -16971,14 +16971,14 @@ ServiceOptions.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.deprecated != null && 
message.hasOwnProperty("deprecated")) + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) writer.uint32(/* id 33, wireType 0 =*/264).bool(message.deprecated); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) $root.google.protobuf.UninterpretedOption.encode(message.uninterpretedOption[i], writer.uint32(/* id 999, wireType 2 =*/7994).fork()).ldelim(); - if (message[".google.api.defaultHost"] != null && message.hasOwnProperty(".google.api.defaultHost")) + if (message[".google.api.defaultHost"] != null && Object.hasOwnProperty.call(message, ".google.api.defaultHost")) writer.uint32(/* id 1049, wireType 2 =*/8394).string(message[".google.api.defaultHost"]); - if (message[".google.api.oauthScopes"] != null && message.hasOwnProperty(".google.api.oauthScopes")) + if (message[".google.api.oauthScopes"] != null && Object.hasOwnProperty.call(message, ".google.api.oauthScopes")) writer.uint32(/* id 1050, wireType 2 =*/8402).string(message[".google.api.oauthScopes"]); return writer; }; @@ -17257,9 +17257,9 @@ MethodOptions.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.deprecated != null && message.hasOwnProperty("deprecated")) + if (message.deprecated != null && Object.hasOwnProperty.call(message, "deprecated")) writer.uint32(/* id 33, wireType 0 =*/264).bool(message.deprecated); - if (message.idempotencyLevel != null && message.hasOwnProperty("idempotencyLevel")) + if (message.idempotencyLevel != null && Object.hasOwnProperty.call(message, "idempotencyLevel")) writer.uint32(/* id 34, wireType 0 =*/272).int32(message.idempotencyLevel); if (message.uninterpretedOption != null && message.uninterpretedOption.length) for (var i = 0; i < message.uninterpretedOption.length; ++i) @@ -17267,7 +17267,7 @@ if (message[".google.api.methodSignature"] != null && message[".google.api.methodSignature"].length) for 
(var i = 0; i < message[".google.api.methodSignature"].length; ++i) writer.uint32(/* id 1051, wireType 2 =*/8410).string(message[".google.api.methodSignature"][i]); - if (message[".google.api.http"] != null && message.hasOwnProperty(".google.api.http")) + if (message[".google.api.http"] != null && Object.hasOwnProperty.call(message, ".google.api.http")) $root.google.api.HttpRule.encode(message[".google.api.http"], writer.uint32(/* id 72295728, wireType 2 =*/578365826).fork()).ldelim(); return writer; }; @@ -17501,7 +17501,7 @@ /** * IdempotencyLevel enum. * @name google.protobuf.MethodOptions.IdempotencyLevel - * @enum {string} + * @enum {number} * @property {number} IDEMPOTENCY_UNKNOWN=0 IDEMPOTENCY_UNKNOWN value * @property {number} NO_SIDE_EFFECTS=1 NO_SIDE_EFFECTS value * @property {number} IDEMPOTENT=2 IDEMPOTENT value @@ -17631,17 +17631,17 @@ if (message.name != null && message.name.length) for (var i = 0; i < message.name.length; ++i) $root.google.protobuf.UninterpretedOption.NamePart.encode(message.name[i], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim(); - if (message.identifierValue != null && message.hasOwnProperty("identifierValue")) + if (message.identifierValue != null && Object.hasOwnProperty.call(message, "identifierValue")) writer.uint32(/* id 3, wireType 2 =*/26).string(message.identifierValue); - if (message.positiveIntValue != null && message.hasOwnProperty("positiveIntValue")) + if (message.positiveIntValue != null && Object.hasOwnProperty.call(message, "positiveIntValue")) writer.uint32(/* id 4, wireType 0 =*/32).uint64(message.positiveIntValue); - if (message.negativeIntValue != null && message.hasOwnProperty("negativeIntValue")) + if (message.negativeIntValue != null && Object.hasOwnProperty.call(message, "negativeIntValue")) writer.uint32(/* id 5, wireType 0 =*/40).int64(message.negativeIntValue); - if (message.doubleValue != null && message.hasOwnProperty("doubleValue")) + if (message.doubleValue != null && 
Object.hasOwnProperty.call(message, "doubleValue")) writer.uint32(/* id 6, wireType 1 =*/49).double(message.doubleValue); - if (message.stringValue != null && message.hasOwnProperty("stringValue")) + if (message.stringValue != null && Object.hasOwnProperty.call(message, "stringValue")) writer.uint32(/* id 7, wireType 2 =*/58).bytes(message.stringValue); - if (message.aggregateValue != null && message.hasOwnProperty("aggregateValue")) + if (message.aggregateValue != null && Object.hasOwnProperty.call(message, "aggregateValue")) writer.uint32(/* id 8, wireType 2 =*/66).string(message.aggregateValue); return writer; }; @@ -18418,9 +18418,9 @@ writer.int32(message.span[i]); writer.ldelim(); } - if (message.leadingComments != null && message.hasOwnProperty("leadingComments")) + if (message.leadingComments != null && Object.hasOwnProperty.call(message, "leadingComments")) writer.uint32(/* id 3, wireType 2 =*/26).string(message.leadingComments); - if (message.trailingComments != null && message.hasOwnProperty("trailingComments")) + if (message.trailingComments != null && Object.hasOwnProperty.call(message, "trailingComments")) writer.uint32(/* id 4, wireType 2 =*/34).string(message.trailingComments); if (message.leadingDetachedComments != null && message.leadingDetachedComments.length) for (var i = 0; i < message.leadingDetachedComments.length; ++i) @@ -18951,11 +18951,11 @@ writer.int32(message.path[i]); writer.ldelim(); } - if (message.sourceFile != null && message.hasOwnProperty("sourceFile")) + if (message.sourceFile != null && Object.hasOwnProperty.call(message, "sourceFile")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.sourceFile); - if (message.begin != null && message.hasOwnProperty("begin")) + if (message.begin != null && Object.hasOwnProperty.call(message, "begin")) writer.uint32(/* id 3, wireType 0 =*/24).int32(message.begin); - if (message.end != null && message.hasOwnProperty("end")) + if (message.end != null && Object.hasOwnProperty.call(message, 
"end")) writer.uint32(/* id 4, wireType 0 =*/32).int32(message.end); return writer; }; @@ -19200,7 +19200,7 @@ Struct.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.fields != null && message.hasOwnProperty("fields")) + if (message.fields != null && Object.hasOwnProperty.call(message, "fields")) for (var keys = Object.keys(message.fields), i = 0; i < keys.length; ++i) { writer.uint32(/* id 1, wireType 2 =*/10).fork().uint32(/* id 1, wireType 2 =*/10).string(keys[i]); $root.google.protobuf.Value.encode(message.fields[keys[i]], writer.uint32(/* id 2, wireType 2 =*/18).fork()).ldelim().ldelim(); @@ -19473,17 +19473,17 @@ Value.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.nullValue != null && message.hasOwnProperty("nullValue")) + if (message.nullValue != null && Object.hasOwnProperty.call(message, "nullValue")) writer.uint32(/* id 1, wireType 0 =*/8).int32(message.nullValue); - if (message.numberValue != null && message.hasOwnProperty("numberValue")) + if (message.numberValue != null && Object.hasOwnProperty.call(message, "numberValue")) writer.uint32(/* id 2, wireType 1 =*/17).double(message.numberValue); - if (message.stringValue != null && message.hasOwnProperty("stringValue")) + if (message.stringValue != null && Object.hasOwnProperty.call(message, "stringValue")) writer.uint32(/* id 3, wireType 2 =*/26).string(message.stringValue); - if (message.boolValue != null && message.hasOwnProperty("boolValue")) + if (message.boolValue != null && Object.hasOwnProperty.call(message, "boolValue")) writer.uint32(/* id 4, wireType 0 =*/32).bool(message.boolValue); - if (message.structValue != null && message.hasOwnProperty("structValue")) + if (message.structValue != null && Object.hasOwnProperty.call(message, "structValue")) $root.google.protobuf.Struct.encode(message.structValue, writer.uint32(/* id 5, wireType 2 =*/42).fork()).ldelim(); - if (message.listValue != null && 
message.hasOwnProperty("listValue")) + if (message.listValue != null && Object.hasOwnProperty.call(message, "listValue")) $root.google.protobuf.ListValue.encode(message.listValue, writer.uint32(/* id 6, wireType 2 =*/50).fork()).ldelim(); return writer; }; @@ -19726,7 +19726,7 @@ /** * NullValue enum. * @name google.protobuf.NullValue - * @enum {string} + * @enum {number} * @property {number} NULL_VALUE=0 NULL_VALUE value */ protobuf.NullValue = (function() { @@ -20008,9 +20008,9 @@ Timestamp.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.seconds != null && message.hasOwnProperty("seconds")) + if (message.seconds != null && Object.hasOwnProperty.call(message, "seconds")) writer.uint32(/* id 1, wireType 0 =*/8).int64(message.seconds); - if (message.nanos != null && message.hasOwnProperty("nanos")) + if (message.nanos != null && Object.hasOwnProperty.call(message, "nanos")) writer.uint32(/* id 2, wireType 0 =*/16).int32(message.nanos); return writer; }; @@ -20232,9 +20232,9 @@ Any.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.type_url != null && message.hasOwnProperty("type_url")) + if (message.type_url != null && Object.hasOwnProperty.call(message, "type_url")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.type_url); - if (message.value != null && message.hasOwnProperty("value")) + if (message.value != null && Object.hasOwnProperty.call(message, "value")) writer.uint32(/* id 2, wireType 2 =*/18).bytes(message.value); return writer; }; @@ -20451,9 +20451,9 @@ Duration.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.seconds != null && message.hasOwnProperty("seconds")) + if (message.seconds != null && Object.hasOwnProperty.call(message, "seconds")) writer.uint32(/* id 1, wireType 0 =*/8).int64(message.seconds); - if (message.nanos != null && message.hasOwnProperty("nanos")) + if (message.nanos != null && 
Object.hasOwnProperty.call(message, "nanos")) writer.uint32(/* id 2, wireType 0 =*/16).int32(message.nanos); return writer; }; @@ -21029,7 +21029,7 @@ DoubleValue.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.value != null && message.hasOwnProperty("value")) + if (message.value != null && Object.hasOwnProperty.call(message, "value")) writer.uint32(/* id 1, wireType 1 =*/9).double(message.value); return writer; }; @@ -21216,7 +21216,7 @@ FloatValue.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.value != null && message.hasOwnProperty("value")) + if (message.value != null && Object.hasOwnProperty.call(message, "value")) writer.uint32(/* id 1, wireType 5 =*/13).float(message.value); return writer; }; @@ -21403,7 +21403,7 @@ Int64Value.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.value != null && message.hasOwnProperty("value")) + if (message.value != null && Object.hasOwnProperty.call(message, "value")) writer.uint32(/* id 1, wireType 0 =*/8).int64(message.value); return writer; }; @@ -21604,7 +21604,7 @@ UInt64Value.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.value != null && message.hasOwnProperty("value")) + if (message.value != null && Object.hasOwnProperty.call(message, "value")) writer.uint32(/* id 1, wireType 0 =*/8).uint64(message.value); return writer; }; @@ -21805,7 +21805,7 @@ Int32Value.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.value != null && message.hasOwnProperty("value")) + if (message.value != null && Object.hasOwnProperty.call(message, "value")) writer.uint32(/* id 1, wireType 0 =*/8).int32(message.value); return writer; }; @@ -21992,7 +21992,7 @@ UInt32Value.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.value != null && 
message.hasOwnProperty("value")) + if (message.value != null && Object.hasOwnProperty.call(message, "value")) writer.uint32(/* id 1, wireType 0 =*/8).uint32(message.value); return writer; }; @@ -22179,7 +22179,7 @@ BoolValue.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.value != null && message.hasOwnProperty("value")) + if (message.value != null && Object.hasOwnProperty.call(message, "value")) writer.uint32(/* id 1, wireType 0 =*/8).bool(message.value); return writer; }; @@ -22366,7 +22366,7 @@ StringValue.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.value != null && message.hasOwnProperty("value")) + if (message.value != null && Object.hasOwnProperty.call(message, "value")) writer.uint32(/* id 1, wireType 2 =*/10).string(message.value); return writer; }; @@ -22553,7 +22553,7 @@ BytesValue.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.value != null && message.hasOwnProperty("value")) + if (message.value != null && Object.hasOwnProperty.call(message, "value")) writer.uint32(/* id 1, wireType 2 =*/10).bytes(message.value); return writer; }; @@ -22780,9 +22780,9 @@ Status.encode = function encode(message, writer) { if (!writer) writer = $Writer.create(); - if (message.code != null && message.hasOwnProperty("code")) + if (message.code != null && Object.hasOwnProperty.call(message, "code")) writer.uint32(/* id 1, wireType 0 =*/8).int32(message.code); - if (message.message != null && message.hasOwnProperty("message")) + if (message.message != null && Object.hasOwnProperty.call(message, "message")) writer.uint32(/* id 2, wireType 2 =*/18).string(message.message); if (message.details != null && message.details.length) for (var i = 0; i < message.details.length; ++i) diff --git a/packages/google-cloud-bigquery-datatransfer/src/v1/data_transfer_service_client.ts 
b/packages/google-cloud-bigquery-datatransfer/src/v1/data_transfer_service_client.ts index f6c7849d8ce..91a6cc0a25c 100644 --- a/packages/google-cloud-bigquery-datatransfer/src/v1/data_transfer_service_client.ts +++ b/packages/google-cloud-bigquery-datatransfer/src/v1/data_transfer_service_client.ts @@ -17,11 +17,18 @@ // ** All changes to this file may be overwritten. ** import * as gax from 'google-gax'; -import {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; +import { + Callback, + CallOptions, + Descriptors, + ClientOptions, + PaginationCallback, + GaxCall, +} from 'google-gax'; import * as path from 'path'; -import { Transform } from 'stream'; -import { RequestType } from 'google-gax/build/src/apitypes'; +import {Transform} from 'stream'; +import {RequestType} from 'google-gax/build/src/apitypes'; import * as protos from '../../protos/protos'; import * as gapicConfig from './data_transfer_service_client_config.json'; @@ -43,7 +50,12 @@ export class DataTransferServiceClient { private _protos: {}; private _defaults: {[method: string]: gax.CallSettings}; auth: gax.GoogleAuth; - descriptors: Descriptors = {page: {}, stream: {}, longrunning: {}, batching: {}}; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; innerApiCalls: {[name: string]: Function}; pathTemplates: {[name: string]: gax.PathTemplate}; dataTransferServiceStub?: Promise<{[name: string]: Function}>; @@ -77,10 +89,12 @@ export class DataTransferServiceClient { constructor(opts?: ClientOptions) { // Ensure that options include the service address and port. const staticMembers = this.constructor as typeof DataTransferServiceClient; - const servicePath = opts && opts.servicePath ? - opts.servicePath : - ((opts && opts.apiEndpoint) ? opts.apiEndpoint : - staticMembers.servicePath); + const servicePath = + opts && opts.servicePath + ? opts.servicePath + : opts && opts.apiEndpoint + ? 
opts.apiEndpoint + : staticMembers.servicePath; const port = opts && opts.port ? opts.port : staticMembers.port; if (!opts) { @@ -90,8 +104,8 @@ export class DataTransferServiceClient { opts.port = opts.port || port; opts.clientConfig = opts.clientConfig || {}; - const isBrowser = (typeof window !== 'undefined'); - if (isBrowser){ + const isBrowser = typeof window !== 'undefined'; + if (isBrowser) { opts.fallback = true; } // If we are in browser, we are already using fallback because of the @@ -108,13 +122,10 @@ export class DataTransferServiceClient { this._opts = opts; // Save the auth object to the client, for use by other methods. - this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); + this.auth = this._gaxGrpc.auth as gax.GoogleAuth; // Determine the client header string. - const clientHeader = [ - `gax/${this._gaxModule.version}`, - `gapic/${version}`, - ]; + const clientHeader = [`gax/${this._gaxModule.version}`, `gapic/${version}`]; if (typeof process !== 'undefined' && 'versions' in process) { clientHeader.push(`gl-node/${process.versions.node}`); } else { @@ -130,12 +141,18 @@ export class DataTransferServiceClient { // For Node.js, pass the path to JSON proto file. // For browsers, pass the JSON content. - const nodejsProtoPath = path.join(__dirname, '..', '..', 'protos', 'protos.json'); + const nodejsProtoPath = path.join( + __dirname, + '..', + '..', + 'protos', + 'protos.json' + ); this._protos = this._gaxGrpc.loadProto( - opts.fallback ? - // eslint-disable-next-line @typescript-eslint/no-var-requires - require("../../protos/protos.json") : - nodejsProtoPath + opts.fallback + ? // eslint-disable-next-line @typescript-eslint/no-var-requires + require('../../protos/protos.json') + : nodejsProtoPath ); // This API contains "path templates"; forward-slash-separated @@ -169,20 +186,35 @@ export class DataTransferServiceClient { // (e.g. 50 results at a time, with tokens to get subsequent // pages). Denote the keys used for pagination and results. 
this.descriptors.page = { - listDataSources: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'dataSources'), - listTransferConfigs: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'transferConfigs'), - listTransferRuns: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'transferRuns'), - listTransferLogs: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'transferMessages') + listDataSources: new this._gaxModule.PageDescriptor( + 'pageToken', + 'nextPageToken', + 'dataSources' + ), + listTransferConfigs: new this._gaxModule.PageDescriptor( + 'pageToken', + 'nextPageToken', + 'transferConfigs' + ), + listTransferRuns: new this._gaxModule.PageDescriptor( + 'pageToken', + 'nextPageToken', + 'transferRuns' + ), + listTransferLogs: new this._gaxModule.PageDescriptor( + 'pageToken', + 'nextPageToken', + 'transferMessages' + ), }; // Put together the default options sent with requests. this._defaults = this._gaxGrpc.constructSettings( - 'google.cloud.bigquery.datatransfer.v1.DataTransferService', gapicConfig as gax.ClientConfig, - opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); + 'google.cloud.bigquery.datatransfer.v1.DataTransferService', + gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, + {'x-goog-api-client': clientHeader.join(' ')} + ); // Set up a dictionary of "inner API calls"; the core implementation // of calling the API is handled in `google-gax`, with this code @@ -210,16 +242,34 @@ export class DataTransferServiceClient { // Put together the "service stub" for // google.cloud.bigquery.datatransfer.v1.DataTransferService. this.dataTransferServiceStub = this._gaxGrpc.createStub( - this._opts.fallback ? 
- (this._protos as protobuf.Root).lookupService('google.cloud.bigquery.datatransfer.v1.DataTransferService') : - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (this._protos as any).google.cloud.bigquery.datatransfer.v1.DataTransferService, - this._opts) as Promise<{[method: string]: Function}>; + this._opts.fallback + ? (this._protos as protobuf.Root).lookupService( + 'google.cloud.bigquery.datatransfer.v1.DataTransferService' + ) + : // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this._protos as any).google.cloud.bigquery.datatransfer.v1 + .DataTransferService, + this._opts + ) as Promise<{[method: string]: Function}>; // Iterate over each of the methods that the service provides // and create an API call method for each. - const dataTransferServiceStubMethods = - ['getDataSource', 'listDataSources', 'createTransferConfig', 'updateTransferConfig', 'deleteTransferConfig', 'getTransferConfig', 'listTransferConfigs', 'scheduleTransferRuns', 'startManualTransferRuns', 'getTransferRun', 'deleteTransferRun', 'listTransferRuns', 'listTransferLogs', 'checkValidCreds']; + const dataTransferServiceStubMethods = [ + 'getDataSource', + 'listDataSources', + 'createTransferConfig', + 'updateTransferConfig', + 'deleteTransferConfig', + 'getTransferConfig', + 'listTransferConfigs', + 'scheduleTransferRuns', + 'startManualTransferRuns', + 'getTransferRun', + 'deleteTransferRun', + 'listTransferRuns', + 'listTransferLogs', + 'checkValidCreds', + ]; for (const methodName of dataTransferServiceStubMethods) { const callPromise = this.dataTransferServiceStub.then( stub => (...args: Array<{}>) => { @@ -229,16 +279,17 @@ export class DataTransferServiceClient { const func = stub[methodName]; return func.apply(stub, args); }, - (err: Error|null|undefined) => () => { + (err: Error | null | undefined) => () => { throw err; - }); + } + ); const apiCall = this._gaxModule.createApiCall( callPromise, this._defaults[methodName], 
this.descriptors.page[methodName] || - this.descriptors.stream[methodName] || - this.descriptors.longrunning[methodName] + this.descriptors.stream[methodName] || + this.descriptors.longrunning[methodName] ); this.innerApiCalls[methodName] = apiCall; @@ -274,9 +325,7 @@ export class DataTransferServiceClient { * in this service. */ static get scopes() { - return [ - 'https://www.googleapis.com/auth/cloud-platform' - ]; + return ['https://www.googleapis.com/auth/cloud-platform']; } getProjectId(): Promise; @@ -286,8 +335,9 @@ export class DataTransferServiceClient { * @param {function(Error, string)} callback - the callback to * be called with the current project Id. */ - getProjectId(callback?: Callback): - Promise|void { + getProjectId( + callback?: Callback + ): Promise | void { if (callback) { this.auth.getProjectId(callback); return; @@ -299,62 +349,89 @@ export class DataTransferServiceClient { // -- Service calls -- // ------------------- getDataSource( - request: protos.google.cloud.bigquery.datatransfer.v1.IGetDataSourceRequest, - options?: gax.CallOptions): - Promise<[ - protos.google.cloud.bigquery.datatransfer.v1.IDataSource, - protos.google.cloud.bigquery.datatransfer.v1.IGetDataSourceRequest|undefined, {}|undefined - ]>; + request: protos.google.cloud.bigquery.datatransfer.v1.IGetDataSourceRequest, + options?: gax.CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.IDataSource, + ( + | protos.google.cloud.bigquery.datatransfer.v1.IGetDataSourceRequest + | undefined + ), + {} | undefined + ] + >; getDataSource( - request: protos.google.cloud.bigquery.datatransfer.v1.IGetDataSourceRequest, - options: gax.CallOptions, - callback: Callback< - protos.google.cloud.bigquery.datatransfer.v1.IDataSource, - protos.google.cloud.bigquery.datatransfer.v1.IGetDataSourceRequest|null|undefined, - {}|null|undefined>): void; + request: protos.google.cloud.bigquery.datatransfer.v1.IGetDataSourceRequest, + options: gax.CallOptions, + callback: 
Callback< + protos.google.cloud.bigquery.datatransfer.v1.IDataSource, + | protos.google.cloud.bigquery.datatransfer.v1.IGetDataSourceRequest + | null + | undefined, + {} | null | undefined + > + ): void; getDataSource( - request: protos.google.cloud.bigquery.datatransfer.v1.IGetDataSourceRequest, - callback: Callback< - protos.google.cloud.bigquery.datatransfer.v1.IDataSource, - protos.google.cloud.bigquery.datatransfer.v1.IGetDataSourceRequest|null|undefined, - {}|null|undefined>): void; -/** - * Retrieves a supported data source and returns its settings, - * which can be used for UI rendering. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The field will contain name of the resource requested, for example: - * `projects/{project_id}/dataSources/{data_source_id}` or - * `projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}` - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [DataSource]{@link google.cloud.bigquery.datatransfer.v1.DataSource}. - * The promise has a method named "cancel" which cancels the ongoing API call. - */ + request: protos.google.cloud.bigquery.datatransfer.v1.IGetDataSourceRequest, + callback: Callback< + protos.google.cloud.bigquery.datatransfer.v1.IDataSource, + | protos.google.cloud.bigquery.datatransfer.v1.IGetDataSourceRequest + | null + | undefined, + {} | null | undefined + > + ): void; + /** + * Retrieves a supported data source and returns its settings, + * which can be used for UI rendering. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. 
The field will contain name of the resource requested, for example: + * `projects/{project_id}/dataSources/{data_source_id}` or + * `projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [DataSource]{@link google.cloud.bigquery.datatransfer.v1.DataSource}. + * The promise has a method named "cancel" which cancels the ongoing API call. + */ getDataSource( - request: protos.google.cloud.bigquery.datatransfer.v1.IGetDataSourceRequest, - optionsOrCallback?: gax.CallOptions|Callback< + request: protos.google.cloud.bigquery.datatransfer.v1.IGetDataSourceRequest, + optionsOrCallback?: + | gax.CallOptions + | Callback< protos.google.cloud.bigquery.datatransfer.v1.IDataSource, - protos.google.cloud.bigquery.datatransfer.v1.IGetDataSourceRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.datatransfer.v1.IDataSource, - protos.google.cloud.bigquery.datatransfer.v1.IGetDataSourceRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.datatransfer.v1.IDataSource, - protos.google.cloud.bigquery.datatransfer.v1.IGetDataSourceRequest|undefined, {}|undefined - ]>|void { + | protos.google.cloud.bigquery.datatransfer.v1.IGetDataSourceRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.datatransfer.v1.IDataSource, + | protos.google.cloud.bigquery.datatransfer.v1.IGetDataSourceRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.IDataSource, + ( + | protos.google.cloud.bigquery.datatransfer.v1.IGetDataSourceRequest + | undefined + ), + {} | undefined + ] + > | void 
{ request = request || {}; let options: gax.CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; - } - else { + } else { options = optionsOrCallback as gax.CallOptions; } options = options || {}; @@ -363,99 +440,126 @@ export class DataTransferServiceClient { options.otherArgs.headers[ 'x-goog-request-params' ] = gax.routingHeader.fromParams({ - 'name': request.name || '', + name: request.name || '', }); this.initialize(); return this.innerApiCalls.getDataSource(request, options, callback); } createTransferConfig( - request: protos.google.cloud.bigquery.datatransfer.v1.ICreateTransferConfigRequest, - options?: gax.CallOptions): - Promise<[ - protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, - protos.google.cloud.bigquery.datatransfer.v1.ICreateTransferConfigRequest|undefined, {}|undefined - ]>; + request: protos.google.cloud.bigquery.datatransfer.v1.ICreateTransferConfigRequest, + options?: gax.CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, + ( + | protos.google.cloud.bigquery.datatransfer.v1.ICreateTransferConfigRequest + | undefined + ), + {} | undefined + ] + >; createTransferConfig( - request: protos.google.cloud.bigquery.datatransfer.v1.ICreateTransferConfigRequest, - options: gax.CallOptions, - callback: Callback< - protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, - protos.google.cloud.bigquery.datatransfer.v1.ICreateTransferConfigRequest|null|undefined, - {}|null|undefined>): void; + request: protos.google.cloud.bigquery.datatransfer.v1.ICreateTransferConfigRequest, + options: gax.CallOptions, + callback: Callback< + protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, + | protos.google.cloud.bigquery.datatransfer.v1.ICreateTransferConfigRequest + | null + | undefined, + {} | null | undefined + > + ): void; createTransferConfig( - request: 
protos.google.cloud.bigquery.datatransfer.v1.ICreateTransferConfigRequest, - callback: Callback< - protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, - protos.google.cloud.bigquery.datatransfer.v1.ICreateTransferConfigRequest|null|undefined, - {}|null|undefined>): void; -/** - * Creates a new data transfer configuration. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The BigQuery project id where the transfer configuration should be created. - * Must be in the format projects/{project_id}/locations/{location_id} or - * projects/{project_id}. If specified location and location of the - * destination bigquery dataset do not match - the request will fail. - * @param {google.cloud.bigquery.datatransfer.v1.TransferConfig} request.transferConfig - * Required. Data transfer configuration to create. - * @param {string} request.authorizationCode - * Optional OAuth2 authorization code to use with this transfer configuration. - * This is required if new credentials are needed, as indicated by - * `CheckValidCreds`. - * In order to obtain authorization_code, please make a - * request to - * https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=&scope=&redirect_uri= - * - * * client_id should be OAuth client_id of BigQuery DTS API for the given - * data source returned by ListDataSources method. - * * data_source_scopes are the scopes returned by ListDataSources method. - * * redirect_uri is an optional parameter. If not specified, then - * authorization code is posted to the opener of authorization flow window. - * Otherwise it will be sent to the redirect uri. A special value of - * urn:ietf:wg:oauth:2.0:oob means that authorization code should be - * returned in the title bar of the browser, with the page text prompting - * the user to copy the code and paste it in the application. - * @param {string} request.versionInfo - * Optional version info. 
If users want to find a very recent access token, - * that is, immediately after approving access, users have to set the - * version_info claim in the token request. To obtain the version_info, users - * must use the "none+gsession" response type. which be return a - * version_info back in the authorization response which be be put in a JWT - * claim in the token request. - * @param {string} request.serviceAccountName - * Optional service account name. If this field is set, transfer config will - * be created with this service account credentials. It requires that - * requesting user calling this API has permissions to act as this service - * account. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [TransferConfig]{@link google.cloud.bigquery.datatransfer.v1.TransferConfig}. - * The promise has a method named "cancel" which cancels the ongoing API call. - */ + request: protos.google.cloud.bigquery.datatransfer.v1.ICreateTransferConfigRequest, + callback: Callback< + protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, + | protos.google.cloud.bigquery.datatransfer.v1.ICreateTransferConfigRequest + | null + | undefined, + {} | null | undefined + > + ): void; + /** + * Creates a new data transfer configuration. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The BigQuery project id where the transfer configuration should be created. + * Must be in the format projects/{project_id}/locations/{location_id} or + * projects/{project_id}. If specified location and location of the + * destination bigquery dataset do not match - the request will fail. + * @param {google.cloud.bigquery.datatransfer.v1.TransferConfig} request.transferConfig + * Required. 
Data transfer configuration to create. + * @param {string} request.authorizationCode + * Optional OAuth2 authorization code to use with this transfer configuration. + * This is required if new credentials are needed, as indicated by + * `CheckValidCreds`. + * In order to obtain authorization_code, please make a + * request to + * https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=&scope=&redirect_uri= + * + * * client_id should be OAuth client_id of BigQuery DTS API for the given + * data source returned by ListDataSources method. + * * data_source_scopes are the scopes returned by ListDataSources method. + * * redirect_uri is an optional parameter. If not specified, then + * authorization code is posted to the opener of authorization flow window. + * Otherwise it will be sent to the redirect uri. A special value of + * urn:ietf:wg:oauth:2.0:oob means that authorization code should be + * returned in the title bar of the browser, with the page text prompting + * the user to copy the code and paste it in the application. + * @param {string} request.versionInfo + * Optional version info. If users want to find a very recent access token, + * that is, immediately after approving access, users have to set the + * version_info claim in the token request. To obtain the version_info, users + * must use the "none+gsession" response type, which returns a + * version_info back in the authorization response, which should be put in a JWT + * claim in the token request. + * @param {string} request.serviceAccountName + * Optional service account name. If this field is set, transfer config will + * be created with this service account's credentials. It requires that the + * requesting user calling this API has permission to act as this service + * account. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
+ * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [TransferConfig]{@link google.cloud.bigquery.datatransfer.v1.TransferConfig}. + * The promise has a method named "cancel" which cancels the ongoing API call. + */ createTransferConfig( - request: protos.google.cloud.bigquery.datatransfer.v1.ICreateTransferConfigRequest, - optionsOrCallback?: gax.CallOptions|Callback< + request: protos.google.cloud.bigquery.datatransfer.v1.ICreateTransferConfigRequest, + optionsOrCallback?: + | gax.CallOptions + | Callback< protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, - protos.google.cloud.bigquery.datatransfer.v1.ICreateTransferConfigRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, - protos.google.cloud.bigquery.datatransfer.v1.ICreateTransferConfigRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, - protos.google.cloud.bigquery.datatransfer.v1.ICreateTransferConfigRequest|undefined, {}|undefined - ]>|void { + | protos.google.cloud.bigquery.datatransfer.v1.ICreateTransferConfigRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, + | protos.google.cloud.bigquery.datatransfer.v1.ICreateTransferConfigRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, + ( + | protos.google.cloud.bigquery.datatransfer.v1.ICreateTransferConfigRequest + | undefined + ), + {} | undefined + ] + > | void { request = request || {}; let options: gax.CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; - } - else { + } else { options = optionsOrCallback as gax.CallOptions; } options = options || {}; @@ -464,98 
+568,125 @@ export class DataTransferServiceClient { options.otherArgs.headers[ 'x-goog-request-params' ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', + parent: request.parent || '', }); this.initialize(); return this.innerApiCalls.createTransferConfig(request, options, callback); } updateTransferConfig( - request: protos.google.cloud.bigquery.datatransfer.v1.IUpdateTransferConfigRequest, - options?: gax.CallOptions): - Promise<[ - protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, - protos.google.cloud.bigquery.datatransfer.v1.IUpdateTransferConfigRequest|undefined, {}|undefined - ]>; + request: protos.google.cloud.bigquery.datatransfer.v1.IUpdateTransferConfigRequest, + options?: gax.CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, + ( + | protos.google.cloud.bigquery.datatransfer.v1.IUpdateTransferConfigRequest + | undefined + ), + {} | undefined + ] + >; updateTransferConfig( - request: protos.google.cloud.bigquery.datatransfer.v1.IUpdateTransferConfigRequest, - options: gax.CallOptions, - callback: Callback< - protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, - protos.google.cloud.bigquery.datatransfer.v1.IUpdateTransferConfigRequest|null|undefined, - {}|null|undefined>): void; + request: protos.google.cloud.bigquery.datatransfer.v1.IUpdateTransferConfigRequest, + options: gax.CallOptions, + callback: Callback< + protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, + | protos.google.cloud.bigquery.datatransfer.v1.IUpdateTransferConfigRequest + | null + | undefined, + {} | null | undefined + > + ): void; updateTransferConfig( - request: protos.google.cloud.bigquery.datatransfer.v1.IUpdateTransferConfigRequest, - callback: Callback< - protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, - protos.google.cloud.bigquery.datatransfer.v1.IUpdateTransferConfigRequest|null|undefined, - {}|null|undefined>): void; -/** - * Updates a data transfer configuration. 
- * All fields must be set, even if they are not updated. - * - * @param {Object} request - * The request object that will be sent. - * @param {google.cloud.bigquery.datatransfer.v1.TransferConfig} request.transferConfig - * Required. Data transfer configuration to create. - * @param {string} request.authorizationCode - * Optional OAuth2 authorization code to use with this transfer configuration. - * If it is provided, the transfer configuration will be associated with the - * authorizing user. - * In order to obtain authorization_code, please make a - * request to - * https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=&scope=&redirect_uri= - * - * * client_id should be OAuth client_id of BigQuery DTS API for the given - * data source returned by ListDataSources method. - * * data_source_scopes are the scopes returned by ListDataSources method. - * * redirect_uri is an optional parameter. If not specified, then - * authorization code is posted to the opener of authorization flow window. - * Otherwise it will be sent to the redirect uri. A special value of - * urn:ietf:wg:oauth:2.0:oob means that authorization code should be - * returned in the title bar of the browser, with the page text prompting - * the user to copy the code and paste it in the application. - * @param {google.protobuf.FieldMask} request.updateMask - * Required. Required list of fields to be updated in this request. - * @param {string} request.versionInfo - * Optional version info. If users want to find a very recent access token, - * that is, immediately after approving access, users have to set the - * version_info claim in the token request. To obtain the version_info, users - * must use the "none+gsession" response type. which be return a - * version_info back in the authorization response which be be put in a JWT - * claim in the token request. - * @param {string} request.serviceAccountName - * Optional service account name. 
If this field is set and - * "service_account_name" is set in update_mask, transfer config will be - * updated to use this service account credentials. It requires that - * requesting user calling this API has permissions to act as this service - * account. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [TransferConfig]{@link google.cloud.bigquery.datatransfer.v1.TransferConfig}. - * The promise has a method named "cancel" which cancels the ongoing API call. - */ + request: protos.google.cloud.bigquery.datatransfer.v1.IUpdateTransferConfigRequest, + callback: Callback< + protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, + | protos.google.cloud.bigquery.datatransfer.v1.IUpdateTransferConfigRequest + | null + | undefined, + {} | null | undefined + > + ): void; + /** + * Updates a data transfer configuration. + * All fields must be set, even if they are not updated. + * + * @param {Object} request + * The request object that will be sent. + * @param {google.cloud.bigquery.datatransfer.v1.TransferConfig} request.transferConfig + * Required. Data transfer configuration to create. + * @param {string} request.authorizationCode + * Optional OAuth2 authorization code to use with this transfer configuration. + * If it is provided, the transfer configuration will be associated with the + * authorizing user. + * In order to obtain authorization_code, please make a + * request to + * https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=&scope=&redirect_uri= + * + * * client_id should be OAuth client_id of BigQuery DTS API for the given + * data source returned by ListDataSources method. + * * data_source_scopes are the scopes returned by ListDataSources method. + * * redirect_uri is an optional parameter. 
If not specified, then + * authorization code is posted to the opener of authorization flow window. + * Otherwise it will be sent to the redirect uri. A special value of + * urn:ietf:wg:oauth:2.0:oob means that authorization code should be + * returned in the title bar of the browser, with the page text prompting + * the user to copy the code and paste it in the application. + * @param {google.protobuf.FieldMask} request.updateMask + * Required. Required list of fields to be updated in this request. + * @param {string} request.versionInfo + * Optional version info. If users want to find a very recent access token, + * that is, immediately after approving access, users have to set the + * version_info claim in the token request. To obtain the version_info, users + * must use the "none+gsession" response type, which returns a + * version_info back in the authorization response, which should be put in a JWT + * claim in the token request. + * @param {string} request.serviceAccountName + * Optional service account name. If this field is set and + * "service_account_name" is set in update_mask, transfer config will be + * updated to use this service account's credentials. It requires that the + * requesting user calling this API has permission to act as this service + * account. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [TransferConfig]{@link google.cloud.bigquery.datatransfer.v1.TransferConfig}. + * The promise has a method named "cancel" which cancels the ongoing API call. 
+ */ updateTransferConfig( - request: protos.google.cloud.bigquery.datatransfer.v1.IUpdateTransferConfigRequest, - optionsOrCallback?: gax.CallOptions|Callback< - protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, - protos.google.cloud.bigquery.datatransfer.v1.IUpdateTransferConfigRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< + request: protos.google.cloud.bigquery.datatransfer.v1.IUpdateTransferConfigRequest, + optionsOrCallback?: + | gax.CallOptions + | Callback< protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, - protos.google.cloud.bigquery.datatransfer.v1.IUpdateTransferConfigRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, - protos.google.cloud.bigquery.datatransfer.v1.IUpdateTransferConfigRequest|undefined, {}|undefined - ]>|void { + | protos.google.cloud.bigquery.datatransfer.v1.IUpdateTransferConfigRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, + | protos.google.cloud.bigquery.datatransfer.v1.IUpdateTransferConfigRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, + ( + | protos.google.cloud.bigquery.datatransfer.v1.IUpdateTransferConfigRequest + | undefined + ), + {} | undefined + ] + > | void { request = request || {}; let options: gax.CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; - } - else { + } else { options = optionsOrCallback as gax.CallOptions; } options = options || {}; @@ -570,62 +701,89 @@ export class DataTransferServiceClient { return this.innerApiCalls.updateTransferConfig(request, options, callback); } deleteTransferConfig( - request: protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferConfigRequest, - options?: gax.CallOptions): - Promise<[ - 
protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferConfigRequest|undefined, {}|undefined - ]>; + request: protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferConfigRequest, + options?: gax.CallOptions + ): Promise< + [ + protos.google.protobuf.IEmpty, + ( + | protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferConfigRequest + | undefined + ), + {} | undefined + ] + >; deleteTransferConfig( - request: protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferConfigRequest, - options: gax.CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferConfigRequest|null|undefined, - {}|null|undefined>): void; + request: protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferConfigRequest, + options: gax.CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferConfigRequest + | null + | undefined, + {} | null | undefined + > + ): void; deleteTransferConfig( - request: protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferConfigRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferConfigRequest|null|undefined, - {}|null|undefined>): void; -/** - * Deletes a data transfer configuration, - * including any associated transfer runs and logs. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The field will contain name of the resource requested, for example: - * `projects/{project_id}/transferConfigs/{config_id}` or - * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}` - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. 
- * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * The promise has a method named "cancel" which cancels the ongoing API call. - */ + request: protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferConfigRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferConfigRequest + | null + | undefined, + {} | null | undefined + > + ): void; + /** + * Deletes a data transfer configuration, + * including any associated transfer runs and logs. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The field will contain name of the resource requested, for example: + * `projects/{project_id}/transferConfigs/{config_id}` or + * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * The promise has a method named "cancel" which cancels the ongoing API call. 
+ */ deleteTransferConfig( - request: protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferConfigRequest, - optionsOrCallback?: gax.CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferConfigRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< + request: protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferConfigRequest, + optionsOrCallback?: + | gax.CallOptions + | Callback< protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferConfigRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferConfigRequest|undefined, {}|undefined - ]>|void { + | protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferConfigRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferConfigRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.protobuf.IEmpty, + ( + | protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferConfigRequest + | undefined + ), + {} | undefined + ] + > | void { request = request || {}; let options: gax.CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; - } - else { + } else { options = optionsOrCallback as gax.CallOptions; } options = options || {}; @@ -634,67 +792,94 @@ export class DataTransferServiceClient { options.otherArgs.headers[ 'x-goog-request-params' ] = gax.routingHeader.fromParams({ - 'name': request.name || '', + name: request.name || '', }); this.initialize(); return this.innerApiCalls.deleteTransferConfig(request, options, callback); } getTransferConfig( - request: protos.google.cloud.bigquery.datatransfer.v1.IGetTransferConfigRequest, - options?: gax.CallOptions): - 
Promise<[ - protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, - protos.google.cloud.bigquery.datatransfer.v1.IGetTransferConfigRequest|undefined, {}|undefined - ]>; + request: protos.google.cloud.bigquery.datatransfer.v1.IGetTransferConfigRequest, + options?: gax.CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, + ( + | protos.google.cloud.bigquery.datatransfer.v1.IGetTransferConfigRequest + | undefined + ), + {} | undefined + ] + >; getTransferConfig( - request: protos.google.cloud.bigquery.datatransfer.v1.IGetTransferConfigRequest, - options: gax.CallOptions, - callback: Callback< - protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, - protos.google.cloud.bigquery.datatransfer.v1.IGetTransferConfigRequest|null|undefined, - {}|null|undefined>): void; + request: protos.google.cloud.bigquery.datatransfer.v1.IGetTransferConfigRequest, + options: gax.CallOptions, + callback: Callback< + protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, + | protos.google.cloud.bigquery.datatransfer.v1.IGetTransferConfigRequest + | null + | undefined, + {} | null | undefined + > + ): void; getTransferConfig( - request: protos.google.cloud.bigquery.datatransfer.v1.IGetTransferConfigRequest, - callback: Callback< - protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, - protos.google.cloud.bigquery.datatransfer.v1.IGetTransferConfigRequest|null|undefined, - {}|null|undefined>): void; -/** - * Returns information about a data transfer config. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The field will contain name of the resource requested, for example: - * `projects/{project_id}/transferConfigs/{config_id}` or - * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}` - * @param {object} [options] - * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [TransferConfig]{@link google.cloud.bigquery.datatransfer.v1.TransferConfig}. - * The promise has a method named "cancel" which cancels the ongoing API call. - */ + request: protos.google.cloud.bigquery.datatransfer.v1.IGetTransferConfigRequest, + callback: Callback< + protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, + | protos.google.cloud.bigquery.datatransfer.v1.IGetTransferConfigRequest + | null + | undefined, + {} | null | undefined + > + ): void; + /** + * Returns information about a data transfer config. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The field will contain name of the resource requested, for example: + * `projects/{project_id}/transferConfigs/{config_id}` or + * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [TransferConfig]{@link google.cloud.bigquery.datatransfer.v1.TransferConfig}. + * The promise has a method named "cancel" which cancels the ongoing API call. 
+ */ getTransferConfig( - request: protos.google.cloud.bigquery.datatransfer.v1.IGetTransferConfigRequest, - optionsOrCallback?: gax.CallOptions|Callback< + request: protos.google.cloud.bigquery.datatransfer.v1.IGetTransferConfigRequest, + optionsOrCallback?: + | gax.CallOptions + | Callback< protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, - protos.google.cloud.bigquery.datatransfer.v1.IGetTransferConfigRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, - protos.google.cloud.bigquery.datatransfer.v1.IGetTransferConfigRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, - protos.google.cloud.bigquery.datatransfer.v1.IGetTransferConfigRequest|undefined, {}|undefined - ]>|void { + | protos.google.cloud.bigquery.datatransfer.v1.IGetTransferConfigRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, + | protos.google.cloud.bigquery.datatransfer.v1.IGetTransferConfigRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig, + ( + | protos.google.cloud.bigquery.datatransfer.v1.IGetTransferConfigRequest + | undefined + ), + {} | undefined + ] + > | void { request = request || {}; let options: gax.CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; - } - else { + } else { options = optionsOrCallback as gax.CallOptions; } options = options || {}; @@ -703,77 +888,104 @@ export class DataTransferServiceClient { options.otherArgs.headers[ 'x-goog-request-params' ] = gax.routingHeader.fromParams({ - 'name': request.name || '', + name: request.name || '', }); this.initialize(); return this.innerApiCalls.getTransferConfig(request, options, callback); } 
scheduleTransferRuns( - request: protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsRequest, - options?: gax.CallOptions): - Promise<[ - protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsResponse, - protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsRequest|undefined, {}|undefined - ]>; + request: protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsRequest, + options?: gax.CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsResponse, + ( + | protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsRequest + | undefined + ), + {} | undefined + ] + >; scheduleTransferRuns( - request: protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsRequest, - options: gax.CallOptions, - callback: Callback< - protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsResponse, - protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsRequest|null|undefined, - {}|null|undefined>): void; + request: protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsRequest, + options: gax.CallOptions, + callback: Callback< + protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsResponse, + | protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsRequest + | null + | undefined, + {} | null | undefined + > + ): void; scheduleTransferRuns( - request: protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsRequest, - callback: Callback< - protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsResponse, - protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsRequest|null|undefined, - {}|null|undefined>): void; -/** - * Creates transfer runs for a time range [start_time, end_time]. - * For each date - or whatever granularity the data source supports - in the - * range, one transfer run is created. - * Note that runs are created per UTC time in the time range. 
- * DEPRECATED: use StartManualTransferRuns instead. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. Transfer configuration name in the form: - * `projects/{project_id}/transferConfigs/{config_id}` or - * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. - * @param {google.protobuf.Timestamp} request.startTime - * Required. Start time of the range of transfer runs. For example, - * `"2017-05-25T00:00:00+00:00"`. - * @param {google.protobuf.Timestamp} request.endTime - * Required. End time of the range of transfer runs. For example, - * `"2017-05-30T00:00:00+00:00"`. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [ScheduleTransferRunsResponse]{@link google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse}. - * The promise has a method named "cancel" which cancels the ongoing API call. - */ + request: protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsRequest, + callback: Callback< + protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsResponse, + | protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsRequest + | null + | undefined, + {} | null | undefined + > + ): void; + /** + * Creates transfer runs for a time range [start_time, end_time]. + * For each date - or whatever granularity the data source supports - in the + * range, one transfer run is created. + * Note that runs are created per UTC time in the time range. + * DEPRECATED: use StartManualTransferRuns instead. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. 
Transfer configuration name in the form: + * `projects/{project_id}/transferConfigs/{config_id}` or + * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. + * @param {google.protobuf.Timestamp} request.startTime + * Required. Start time of the range of transfer runs. For example, + * `"2017-05-25T00:00:00+00:00"`. + * @param {google.protobuf.Timestamp} request.endTime + * Required. End time of the range of transfer runs. For example, + * `"2017-05-30T00:00:00+00:00"`. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [ScheduleTransferRunsResponse]{@link google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse}. + * The promise has a method named "cancel" which cancels the ongoing API call. + */ scheduleTransferRuns( - request: protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsRequest, - optionsOrCallback?: gax.CallOptions|Callback< + request: protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsRequest, + optionsOrCallback?: + | gax.CallOptions + | Callback< protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsResponse, - protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsResponse, - protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsResponse, - protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsRequest|undefined, {}|undefined - ]>|void { + | protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsRequest + | null + | undefined, + {} | 
null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsResponse, + | protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsResponse, + ( + | protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsRequest + | undefined + ), + {} | undefined + ] + > | void { request = request || {}; let options: gax.CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; - } - else { + } else { options = optionsOrCallback as gax.CallOptions; } options = options || {}; @@ -782,75 +994,102 @@ export class DataTransferServiceClient { options.otherArgs.headers[ 'x-goog-request-params' ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', + parent: request.parent || '', }); this.initialize(); return this.innerApiCalls.scheduleTransferRuns(request, options, callback); } startManualTransferRuns( - request: protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsRequest, - options?: gax.CallOptions): - Promise<[ - protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsResponse, - protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsRequest|undefined, {}|undefined - ]>; + request: protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsRequest, + options?: gax.CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsResponse, + ( + | protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsRequest + | undefined + ), + {} | undefined + ] + >; startManualTransferRuns( - request: protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsRequest, - options: gax.CallOptions, - callback: Callback< - 
protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsResponse, - protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsRequest|null|undefined, - {}|null|undefined>): void; + request: protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsRequest, + options: gax.CallOptions, + callback: Callback< + protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsResponse, + | protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsRequest + | null + | undefined, + {} | null | undefined + > + ): void; startManualTransferRuns( - request: protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsRequest, - callback: Callback< - protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsResponse, - protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsRequest|null|undefined, - {}|null|undefined>): void; -/** - * Start manual transfer runs to be executed now with schedule_time equal to - * current time. The transfer runs can be created for a time range where the - * run_time is between start_time (inclusive) and end_time (exclusive), or for - * a specific run_time. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Transfer configuration name in the form: - * `projects/{project_id}/transferConfigs/{config_id}` or - * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. - * @param {google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRange} request.requestedTimeRange - * Time range for the transfer runs that should be started. - * @param {google.protobuf.Timestamp} request.requestedRunTime - * Specific run_time for a transfer run to be started. The - * requested_run_time must not be in the future. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
- * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [StartManualTransferRunsResponse]{@link google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse}. - * The promise has a method named "cancel" which cancels the ongoing API call. - */ + request: protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsRequest, + callback: Callback< + protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsResponse, + | protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsRequest + | null + | undefined, + {} | null | undefined + > + ): void; + /** + * Start manual transfer runs to be executed now with schedule_time equal to + * current time. The transfer runs can be created for a time range where the + * run_time is between start_time (inclusive) and end_time (exclusive), or for + * a specific run_time. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Transfer configuration name in the form: + * `projects/{project_id}/transferConfigs/{config_id}` or + * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. + * @param {google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRange} request.requestedTimeRange + * Time range for the transfer runs that should be started. + * @param {google.protobuf.Timestamp} request.requestedRunTime + * Specific run_time for a transfer run to be started. The + * requested_run_time must not be in the future. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [StartManualTransferRunsResponse]{@link google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse}. 
+ * The promise has a method named "cancel" which cancels the ongoing API call. + */ startManualTransferRuns( - request: protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsRequest, - optionsOrCallback?: gax.CallOptions|Callback< - protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsResponse, - protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< + request: protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsRequest, + optionsOrCallback?: + | gax.CallOptions + | Callback< protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsResponse, - protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsResponse, - protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsRequest|undefined, {}|undefined - ]>|void { + | protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsResponse, + | protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsResponse, + ( + | protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsRequest + | undefined + ), + {} | undefined + ] + > | void { request = request || {}; let options: gax.CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; - } - else { + } else { options = optionsOrCallback as gax.CallOptions; } options = options || {}; @@ -859,67 +1098,98 @@ export class DataTransferServiceClient { options.otherArgs.headers[ 
'x-goog-request-params' ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', + parent: request.parent || '', }); this.initialize(); - return this.innerApiCalls.startManualTransferRuns(request, options, callback); + return this.innerApiCalls.startManualTransferRuns( + request, + options, + callback + ); } getTransferRun( - request: protos.google.cloud.bigquery.datatransfer.v1.IGetTransferRunRequest, - options?: gax.CallOptions): - Promise<[ - protos.google.cloud.bigquery.datatransfer.v1.ITransferRun, - protos.google.cloud.bigquery.datatransfer.v1.IGetTransferRunRequest|undefined, {}|undefined - ]>; + request: protos.google.cloud.bigquery.datatransfer.v1.IGetTransferRunRequest, + options?: gax.CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.ITransferRun, + ( + | protos.google.cloud.bigquery.datatransfer.v1.IGetTransferRunRequest + | undefined + ), + {} | undefined + ] + >; getTransferRun( - request: protos.google.cloud.bigquery.datatransfer.v1.IGetTransferRunRequest, - options: gax.CallOptions, - callback: Callback< - protos.google.cloud.bigquery.datatransfer.v1.ITransferRun, - protos.google.cloud.bigquery.datatransfer.v1.IGetTransferRunRequest|null|undefined, - {}|null|undefined>): void; + request: protos.google.cloud.bigquery.datatransfer.v1.IGetTransferRunRequest, + options: gax.CallOptions, + callback: Callback< + protos.google.cloud.bigquery.datatransfer.v1.ITransferRun, + | protos.google.cloud.bigquery.datatransfer.v1.IGetTransferRunRequest + | null + | undefined, + {} | null | undefined + > + ): void; getTransferRun( - request: protos.google.cloud.bigquery.datatransfer.v1.IGetTransferRunRequest, - callback: Callback< - protos.google.cloud.bigquery.datatransfer.v1.ITransferRun, - protos.google.cloud.bigquery.datatransfer.v1.IGetTransferRunRequest|null|undefined, - {}|null|undefined>): void; -/** - * Returns information about the particular transfer run. 
- * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The field will contain name of the resource requested, for example: - * `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or - * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [TransferRun]{@link google.cloud.bigquery.datatransfer.v1.TransferRun}. - * The promise has a method named "cancel" which cancels the ongoing API call. - */ + request: protos.google.cloud.bigquery.datatransfer.v1.IGetTransferRunRequest, + callback: Callback< + protos.google.cloud.bigquery.datatransfer.v1.ITransferRun, + | protos.google.cloud.bigquery.datatransfer.v1.IGetTransferRunRequest + | null + | undefined, + {} | null | undefined + > + ): void; + /** + * Returns information about the particular transfer run. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The field will contain name of the resource requested, for example: + * `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or + * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [TransferRun]{@link google.cloud.bigquery.datatransfer.v1.TransferRun}. + * The promise has a method named "cancel" which cancels the ongoing API call. 
+ */ getTransferRun( - request: protos.google.cloud.bigquery.datatransfer.v1.IGetTransferRunRequest, - optionsOrCallback?: gax.CallOptions|Callback< - protos.google.cloud.bigquery.datatransfer.v1.ITransferRun, - protos.google.cloud.bigquery.datatransfer.v1.IGetTransferRunRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< + request: protos.google.cloud.bigquery.datatransfer.v1.IGetTransferRunRequest, + optionsOrCallback?: + | gax.CallOptions + | Callback< protos.google.cloud.bigquery.datatransfer.v1.ITransferRun, - protos.google.cloud.bigquery.datatransfer.v1.IGetTransferRunRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.datatransfer.v1.ITransferRun, - protos.google.cloud.bigquery.datatransfer.v1.IGetTransferRunRequest|undefined, {}|undefined - ]>|void { + | protos.google.cloud.bigquery.datatransfer.v1.IGetTransferRunRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.datatransfer.v1.ITransferRun, + | protos.google.cloud.bigquery.datatransfer.v1.IGetTransferRunRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.ITransferRun, + ( + | protos.google.cloud.bigquery.datatransfer.v1.IGetTransferRunRequest + | undefined + ), + {} | undefined + ] + > | void { request = request || {}; let options: gax.CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; - } - else { + } else { options = optionsOrCallback as gax.CallOptions; } options = options || {}; @@ -928,67 +1198,94 @@ export class DataTransferServiceClient { options.otherArgs.headers[ 'x-goog-request-params' ] = gax.routingHeader.fromParams({ - 'name': request.name || '', + name: request.name || '', }); this.initialize(); return this.innerApiCalls.getTransferRun(request, options, callback); } deleteTransferRun( - request: 
protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferRunRequest, - options?: gax.CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferRunRequest|undefined, {}|undefined - ]>; + request: protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferRunRequest, + options?: gax.CallOptions + ): Promise< + [ + protos.google.protobuf.IEmpty, + ( + | protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferRunRequest + | undefined + ), + {} | undefined + ] + >; deleteTransferRun( - request: protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferRunRequest, - options: gax.CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferRunRequest|null|undefined, - {}|null|undefined>): void; + request: protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferRunRequest, + options: gax.CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferRunRequest + | null + | undefined, + {} | null | undefined + > + ): void; deleteTransferRun( - request: protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferRunRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferRunRequest|null|undefined, - {}|null|undefined>): void; -/** - * Deletes the specified transfer run. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The field will contain name of the resource requested, for example: - * `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or - * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
- * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * The promise has a method named "cancel" which cancels the ongoing API call. - */ + request: protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferRunRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferRunRequest + | null + | undefined, + {} | null | undefined + > + ): void; + /** + * Deletes the specified transfer run. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The field will contain name of the resource requested, for example: + * `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or + * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * The promise has a method named "cancel" which cancels the ongoing API call. 
+ */ deleteTransferRun( - request: protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferRunRequest, - optionsOrCallback?: gax.CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferRunRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< + request: protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferRunRequest, + optionsOrCallback?: + | gax.CallOptions + | Callback< protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferRunRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferRunRequest|undefined, {}|undefined - ]>|void { + | protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferRunRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.protobuf.IEmpty, + | protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferRunRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.protobuf.IEmpty, + ( + | protos.google.cloud.bigquery.datatransfer.v1.IDeleteTransferRunRequest + | undefined + ), + {} | undefined + ] + > | void { request = request || {}; let options: gax.CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; - } - else { + } else { options = optionsOrCallback as gax.CallOptions; } options = options || {}; @@ -997,72 +1294,99 @@ export class DataTransferServiceClient { options.otherArgs.headers[ 'x-goog-request-params' ] = gax.routingHeader.fromParams({ - 'name': request.name || '', + name: request.name || '', }); this.initialize(); return this.innerApiCalls.deleteTransferRun(request, options, callback); } checkValidCreds( - request: protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsRequest, - options?: gax.CallOptions): - Promise<[ - 
protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsResponse, - protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsRequest|undefined, {}|undefined - ]>; + request: protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsRequest, + options?: gax.CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsResponse, + ( + | protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsRequest + | undefined + ), + {} | undefined + ] + >; checkValidCreds( - request: protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsRequest, - options: gax.CallOptions, - callback: Callback< - protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsResponse, - protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsRequest|null|undefined, - {}|null|undefined>): void; + request: protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsRequest, + options: gax.CallOptions, + callback: Callback< + protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsResponse, + | protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsRequest + | null + | undefined, + {} | null | undefined + > + ): void; checkValidCreds( - request: protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsRequest, - callback: Callback< - protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsResponse, - protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsRequest|null|undefined, - {}|null|undefined>): void; -/** - * Returns true if valid credentials exist for the given data source and - * requesting user. - * Some data sources doesn't support service account, so we need to talk to - * them on behalf of the end user. This API just checks whether we have OAuth - * token for the particular user, which is a pre-requisite before user can - * create a transfer config. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. 
The data source in the form: - * `projects/{project_id}/dataSources/{data_source_id}` or - * `projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [CheckValidCredsResponse]{@link google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse}. - * The promise has a method named "cancel" which cancels the ongoing API call. - */ + request: protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsRequest, + callback: Callback< + protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsResponse, + | protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsRequest + | null + | undefined, + {} | null | undefined + > + ): void; + /** + * Returns true if valid credentials exist for the given data source and + * requesting user. + * Some data sources doesn't support service account, so we need to talk to + * them on behalf of the end user. This API just checks whether we have OAuth + * token for the particular user, which is a pre-requisite before user can + * create a transfer config. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The data source in the form: + * `projects/{project_id}/dataSources/{data_source_id}` or + * `projects/{project_id}/locations/{location_id}/dataSources/{data_source_id}`. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [CheckValidCredsResponse]{@link google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse}. 
+ * The promise has a method named "cancel" which cancels the ongoing API call. + */ checkValidCreds( - request: protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsRequest, - optionsOrCallback?: gax.CallOptions|Callback< + request: protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsRequest, + optionsOrCallback?: + | gax.CallOptions + | Callback< protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsResponse, - protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsResponse, - protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsResponse, - protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsRequest|undefined, {}|undefined - ]>|void { + | protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsRequest + | null + | undefined, + {} | null | undefined + >, + callback?: Callback< + protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsResponse, + | protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsRequest + | null + | undefined, + {} | null | undefined + > + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsResponse, + ( + | protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsRequest + | undefined + ), + {} | undefined + ] + > | void { request = request || {}; let options: gax.CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; - } - else { + } else { options = optionsOrCallback as gax.CallOptions; } options = options || {}; @@ -1071,91 +1395,110 @@ export class DataTransferServiceClient { options.otherArgs.headers[ 'x-goog-request-params' ] = gax.routingHeader.fromParams({ - 'name': request.name || '', + name: request.name || '', }); 
this.initialize(); return this.innerApiCalls.checkValidCreds(request, options, callback); } listDataSources( - request: protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesRequest, - options?: gax.CallOptions): - Promise<[ - protos.google.cloud.bigquery.datatransfer.v1.IDataSource[], - protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesRequest|null, - protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesResponse - ]>; + request: protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesRequest, + options?: gax.CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.IDataSource[], + protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesRequest | null, + protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesResponse + ] + >; listDataSources( - request: protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesRequest, - options: gax.CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesRequest, - protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesResponse|null|undefined, - protos.google.cloud.bigquery.datatransfer.v1.IDataSource>): void; + request: protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesRequest, + options: gax.CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesRequest, + | protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesResponse + | null + | undefined, + protos.google.cloud.bigquery.datatransfer.v1.IDataSource + > + ): void; listDataSources( - request: protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesRequest, - protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesResponse|null|undefined, - protos.google.cloud.bigquery.datatransfer.v1.IDataSource>): void; -/** - * Lists supported data sources and returns their settings, - * 
which can be used for UI rendering. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The BigQuery project id for which data sources should be returned. - * Must be in the form: `projects/{project_id}` or - * `projects/{project_id}/locations/{location_id} - * @param {string} request.pageToken - * Pagination token, which can be used to request a specific page - * of `ListDataSourcesRequest` list results. For multiple-page - * results, `ListDataSourcesResponse` outputs - * a `next_page` token, which can be used as the - * `page_token` value to request the next page of list results. - * @param {number} request.pageSize - * Page size. The default page size is the maximum value of 1000 results. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [DataSource]{@link google.cloud.bigquery.datatransfer.v1.DataSource}. - * The client library support auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * - * When autoPaginate: false is specified through options, the array has three elements. - * The first element is Array of [DataSource]{@link google.cloud.bigquery.datatransfer.v1.DataSource} that corresponds to - * the one page received from the API server. - * If the second element is not null it contains the request object of type [ListDataSourcesRequest]{@link google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest} - * that can be used to obtain the next page of the results. - * If it is null, the next page does not exist. - * The third element contains the raw response received from the API server. 
Its type is - * [ListDataSourcesResponse]{@link google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse}. - * - * The promise has a method named "cancel" which cancels the ongoing API call. - */ + request: protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesRequest, + | protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesResponse + | null + | undefined, + protos.google.cloud.bigquery.datatransfer.v1.IDataSource + > + ): void; + /** + * Lists supported data sources and returns their settings, + * which can be used for UI rendering. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The BigQuery project id for which data sources should be returned. + * Must be in the form: `projects/{project_id}` or + * `projects/{project_id}/locations/{location_id} + * @param {string} request.pageToken + * Pagination token, which can be used to request a specific page + * of `ListDataSourcesRequest` list results. For multiple-page + * results, `ListDataSourcesResponse` outputs + * a `next_page` token, which can be used as the + * `page_token` value to request the next page of list results. + * @param {number} request.pageSize + * Page size. The default page size is the maximum value of 1000 results. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [DataSource]{@link google.cloud.bigquery.datatransfer.v1.DataSource}. + * The client library support auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. 
+ * + * When autoPaginate: false is specified through options, the array has three elements. + * The first element is Array of [DataSource]{@link google.cloud.bigquery.datatransfer.v1.DataSource} that corresponds to + * the one page received from the API server. + * If the second element is not null it contains the request object of type [ListDataSourcesRequest]{@link google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest} + * that can be used to obtain the next page of the results. + * If it is null, the next page does not exist. + * The third element contains the raw response received from the API server. Its type is + * [ListDataSourcesResponse]{@link google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse}. + * + * The promise has a method named "cancel" which cancels the ongoing API call. + */ listDataSources( - request: protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesRequest, - optionsOrCallback?: gax.CallOptions|PaginationCallback< - protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesRequest, - protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesResponse|null|undefined, - protos.google.cloud.bigquery.datatransfer.v1.IDataSource>, - callback?: PaginationCallback< + request: protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesRequest, + optionsOrCallback?: + | gax.CallOptions + | PaginationCallback< protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesRequest, - protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesResponse|null|undefined, - protos.google.cloud.bigquery.datatransfer.v1.IDataSource>): - Promise<[ - protos.google.cloud.bigquery.datatransfer.v1.IDataSource[], - protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesRequest|null, - protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesResponse - ]>|void { + | protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesResponse + | null + | undefined, + protos.google.cloud.bigquery.datatransfer.v1.IDataSource + >, + 
callback?: PaginationCallback< + protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesRequest, + | protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesResponse + | null + | undefined, + protos.google.cloud.bigquery.datatransfer.v1.IDataSource + > + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.IDataSource[], + protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesRequest | null, + protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesResponse + ] + > | void { request = request || {}; let options: gax.CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; - } - else { + } else { options = optionsOrCallback as gax.CallOptions; } options = options || {}; @@ -1164,48 +1507,48 @@ export class DataTransferServiceClient { options.otherArgs.headers[ 'x-goog-request-params' ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', + parent: request.parent || '', }); this.initialize(); return this.innerApiCalls.listDataSources(request, options, callback); } -/** - * Equivalent to {@link listDataSources}, but returns a NodeJS Stream object. - * - * This fetches the paged responses for {@link listDataSources} continuously - * and invokes the callback registered for 'data' event for each element in the - * responses. - * - * The returned object has 'end' method when no more elements are required. - * - * autoPaginate option will be ignored. - * - * @see {@link https://nodejs.org/api/stream.html} - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The BigQuery project id for which data sources should be returned. - * Must be in the form: `projects/{project_id}` or - * `projects/{project_id}/locations/{location_id} - * @param {string} request.pageToken - * Pagination token, which can be used to request a specific page - * of `ListDataSourcesRequest` list results. 
For multiple-page - * results, `ListDataSourcesResponse` outputs - * a `next_page` token, which can be used as the - * `page_token` value to request the next page of list results. - * @param {number} request.pageSize - * Page size. The default page size is the maximum value of 1000 results. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [DataSource]{@link google.cloud.bigquery.datatransfer.v1.DataSource} on 'data' event. - */ + /** + * Equivalent to {@link listDataSources}, but returns a NodeJS Stream object. + * + * This fetches the paged responses for {@link listDataSources} continuously + * and invokes the callback registered for 'data' event for each element in the + * responses. + * + * The returned object has 'end' method when no more elements are required. + * + * autoPaginate option will be ignored. + * + * @see {@link https://nodejs.org/api/stream.html} + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The BigQuery project id for which data sources should be returned. + * Must be in the form: `projects/{project_id}` or + * `projects/{project_id}/locations/{location_id} + * @param {string} request.pageToken + * Pagination token, which can be used to request a specific page + * of `ListDataSourcesRequest` list results. For multiple-page + * results, `ListDataSourcesResponse` outputs + * a `next_page` token, which can be used as the + * `page_token` value to request the next page of list results. + * @param {number} request.pageSize + * Page size. The default page size is the maximum value of 1000 results. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
+ * @returns {Stream} + * An object stream which emits an object representing [DataSource]{@link google.cloud.bigquery.datatransfer.v1.DataSource} on 'data' event. + */ listDataSourcesStream( - request?: protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesRequest, - options?: gax.CallOptions): - Transform{ + request?: protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesRequest, + options?: gax.CallOptions + ): Transform { request = request || {}; options = options || {}; options.otherArgs = options.otherArgs || {}; @@ -1213,7 +1556,7 @@ export class DataTransferServiceClient { options.otherArgs.headers[ 'x-goog-request-params' ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', + parent: request.parent || '', }); const callSettings = new gax.CallSettings(options); this.initialize(); @@ -1224,34 +1567,34 @@ export class DataTransferServiceClient { ); } -/** - * Equivalent to {@link listDataSources}, but returns an iterable object. - * - * for-await-of syntax is used with the iterable to recursively get response element on-demand. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The BigQuery project id for which data sources should be returned. - * Must be in the form: `projects/{project_id}` or - * `projects/{project_id}/locations/{location_id} - * @param {string} request.pageToken - * Pagination token, which can be used to request a specific page - * of `ListDataSourcesRequest` list results. For multiple-page - * results, `ListDataSourcesResponse` outputs - * a `next_page` token, which can be used as the - * `page_token` value to request the next page of list results. - * @param {number} request.pageSize - * Page size. The default page size is the maximum value of 1000 results. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
- * @returns {Object} - * An iterable Object that conforms to @link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols. - */ + /** + * Equivalent to {@link listDataSources}, but returns an iterable object. + * + * for-await-of syntax is used with the iterable to recursively get response element on-demand. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The BigQuery project id for which data sources should be returned. + * Must be in the form: `projects/{project_id}` or + * `projects/{project_id}/locations/{location_id} + * @param {string} request.pageToken + * Pagination token, which can be used to request a specific page + * of `ListDataSourcesRequest` list results. For multiple-page + * results, `ListDataSourcesResponse` outputs + * a `next_page` token, which can be used as the + * `page_token` value to request the next page of list results. + * @param {number} request.pageSize + * Page size. The default page size is the maximum value of 1000 results. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that conforms to @link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols. 
+ */ listDataSourcesAsync( - request?: protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesRequest, - options?: gax.CallOptions): - AsyncIterable{ + request?: protos.google.cloud.bigquery.datatransfer.v1.IListDataSourcesRequest, + options?: gax.CallOptions + ): AsyncIterable { request = request || {}; options = options || {}; options.otherArgs = options.otherArgs || {}; @@ -1259,97 +1602,118 @@ export class DataTransferServiceClient { options.otherArgs.headers[ 'x-goog-request-params' ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', + parent: request.parent || '', }); options = options || {}; const callSettings = new gax.CallSettings(options); this.initialize(); return this.descriptors.page.listDataSources.asyncIterate( this.innerApiCalls['listDataSources'] as GaxCall, - request as unknown as RequestType, + (request as unknown) as RequestType, callSettings - ) as AsyncIterable; + ) as AsyncIterable< + protos.google.cloud.bigquery.datatransfer.v1.IDataSource + >; } listTransferConfigs( - request: protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsRequest, - options?: gax.CallOptions): - Promise<[ - protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig[], - protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsRequest|null, - protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsResponse - ]>; + request: protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsRequest, + options?: gax.CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig[], + protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsRequest | null, + protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsResponse + ] + >; listTransferConfigs( - request: protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsRequest, - options: gax.CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsRequest, - 
protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsResponse|null|undefined, - protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig>): void; + request: protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsRequest, + options: gax.CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsRequest, + | protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsResponse + | null + | undefined, + protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig + > + ): void; listTransferConfigs( - request: protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsRequest, - protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsResponse|null|undefined, - protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig>): void; -/** - * Returns information about all data transfers in the project. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The BigQuery project id for which data sources - * should be returned: `projects/{project_id}` or - * `projects/{project_id}/locations/{location_id}` - * @param {string[]} request.dataSourceIds - * When specified, only configurations of requested data sources are returned. - * @param {string} request.pageToken - * Pagination token, which can be used to request a specific page - * of `ListTransfersRequest` list results. For multiple-page - * results, `ListTransfersResponse` outputs - * a `next_page` token, which can be used as the - * `page_token` value to request the next page of list results. - * @param {number} request.pageSize - * Page size. The default page size is the maximum value of 1000 results. - * @param {object} [options] - * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [TransferConfig]{@link google.cloud.bigquery.datatransfer.v1.TransferConfig}. - * The client library support auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * - * When autoPaginate: false is specified through options, the array has three elements. - * The first element is Array of [TransferConfig]{@link google.cloud.bigquery.datatransfer.v1.TransferConfig} that corresponds to - * the one page received from the API server. - * If the second element is not null it contains the request object of type [ListTransferConfigsRequest]{@link google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest} - * that can be used to obtain the next page of the results. - * If it is null, the next page does not exist. - * The third element contains the raw response received from the API server. Its type is - * [ListTransferConfigsResponse]{@link google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse}. - * - * The promise has a method named "cancel" which cancels the ongoing API call. - */ + request: protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsRequest, + | protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsResponse + | null + | undefined, + protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig + > + ): void; + /** + * Returns information about all data transfers in the project. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. 
The BigQuery project id for which data sources + * should be returned: `projects/{project_id}` or + * `projects/{project_id}/locations/{location_id}` + * @param {string[]} request.dataSourceIds + * When specified, only configurations of requested data sources are returned. + * @param {string} request.pageToken + * Pagination token, which can be used to request a specific page + * of `ListTransfersRequest` list results. For multiple-page + * results, `ListTransfersResponse` outputs + * a `next_page` token, which can be used as the + * `page_token` value to request the next page of list results. + * @param {number} request.pageSize + * Page size. The default page size is the maximum value of 1000 results. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [TransferConfig]{@link google.cloud.bigquery.datatransfer.v1.TransferConfig}. + * The client library support auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * + * When autoPaginate: false is specified through options, the array has three elements. + * The first element is Array of [TransferConfig]{@link google.cloud.bigquery.datatransfer.v1.TransferConfig} that corresponds to + * the one page received from the API server. + * If the second element is not null it contains the request object of type [ListTransferConfigsRequest]{@link google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest} + * that can be used to obtain the next page of the results. + * If it is null, the next page does not exist. + * The third element contains the raw response received from the API server. Its type is + * [ListTransferConfigsResponse]{@link google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse}. 
+ * + * The promise has a method named "cancel" which cancels the ongoing API call. + */ listTransferConfigs( - request: protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsRequest, - optionsOrCallback?: gax.CallOptions|PaginationCallback< - protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsRequest, - protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsResponse|null|undefined, - protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig>, - callback?: PaginationCallback< + request: protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsRequest, + optionsOrCallback?: + | gax.CallOptions + | PaginationCallback< protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsRequest, - protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsResponse|null|undefined, - protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig>): - Promise<[ - protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig[], - protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsRequest|null, - protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsResponse - ]>|void { + | protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsResponse + | null + | undefined, + protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig + >, + callback?: PaginationCallback< + protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsRequest, + | protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsResponse + | null + | undefined, + protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig + > + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig[], + protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsRequest | null, + protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsResponse + ] + > | void { request = request || {}; let options: gax.CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = 
optionsOrCallback; options = {}; - } - else { + } else { options = optionsOrCallback as gax.CallOptions; } options = options || {}; @@ -1358,50 +1722,50 @@ export class DataTransferServiceClient { options.otherArgs.headers[ 'x-goog-request-params' ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', + parent: request.parent || '', }); this.initialize(); return this.innerApiCalls.listTransferConfigs(request, options, callback); } -/** - * Equivalent to {@link listTransferConfigs}, but returns a NodeJS Stream object. - * - * This fetches the paged responses for {@link listTransferConfigs} continuously - * and invokes the callback registered for 'data' event for each element in the - * responses. - * - * The returned object has 'end' method when no more elements are required. - * - * autoPaginate option will be ignored. - * - * @see {@link https://nodejs.org/api/stream.html} - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The BigQuery project id for which data sources - * should be returned: `projects/{project_id}` or - * `projects/{project_id}/locations/{location_id}` - * @param {string[]} request.dataSourceIds - * When specified, only configurations of requested data sources are returned. - * @param {string} request.pageToken - * Pagination token, which can be used to request a specific page - * of `ListTransfersRequest` list results. For multiple-page - * results, `ListTransfersResponse` outputs - * a `next_page` token, which can be used as the - * `page_token` value to request the next page of list results. - * @param {number} request.pageSize - * Page size. The default page size is the maximum value of 1000 results. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
- * @returns {Stream} - * An object stream which emits an object representing [TransferConfig]{@link google.cloud.bigquery.datatransfer.v1.TransferConfig} on 'data' event. - */ + /** + * Equivalent to {@link listTransferConfigs}, but returns a NodeJS Stream object. + * + * This fetches the paged responses for {@link listTransferConfigs} continuously + * and invokes the callback registered for 'data' event for each element in the + * responses. + * + * The returned object has 'end' method when no more elements are required. + * + * autoPaginate option will be ignored. + * + * @see {@link https://nodejs.org/api/stream.html} + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The BigQuery project id for which data sources + * should be returned: `projects/{project_id}` or + * `projects/{project_id}/locations/{location_id}` + * @param {string[]} request.dataSourceIds + * When specified, only configurations of requested data sources are returned. + * @param {string} request.pageToken + * Pagination token, which can be used to request a specific page + * of `ListTransfersRequest` list results. For multiple-page + * results, `ListTransfersResponse` outputs + * a `next_page` token, which can be used as the + * `page_token` value to request the next page of list results. + * @param {number} request.pageSize + * Page size. The default page size is the maximum value of 1000 results. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [TransferConfig]{@link google.cloud.bigquery.datatransfer.v1.TransferConfig} on 'data' event. 
+ */ listTransferConfigsStream( - request?: protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsRequest, - options?: gax.CallOptions): - Transform{ + request?: protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsRequest, + options?: gax.CallOptions + ): Transform { request = request || {}; options = options || {}; options.otherArgs = options.otherArgs || {}; @@ -1409,7 +1773,7 @@ export class DataTransferServiceClient { options.otherArgs.headers[ 'x-goog-request-params' ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', + parent: request.parent || '', }); const callSettings = new gax.CallSettings(options); this.initialize(); @@ -1420,36 +1784,38 @@ export class DataTransferServiceClient { ); } -/** - * Equivalent to {@link listTransferConfigs}, but returns an iterable object. - * - * for-await-of syntax is used with the iterable to recursively get response element on-demand. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The BigQuery project id for which data sources - * should be returned: `projects/{project_id}` or - * `projects/{project_id}/locations/{location_id}` - * @param {string[]} request.dataSourceIds - * When specified, only configurations of requested data sources are returned. - * @param {string} request.pageToken - * Pagination token, which can be used to request a specific page - * of `ListTransfersRequest` list results. For multiple-page - * results, `ListTransfersResponse` outputs - * a `next_page` token, which can be used as the - * `page_token` value to request the next page of list results. - * @param {number} request.pageSize - * Page size. The default page size is the maximum value of 1000 results. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
- * @returns {Object} - * An iterable Object that conforms to @link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols. - */ + /** + * Equivalent to {@link listTransferConfigs}, but returns an iterable object. + * + * for-await-of syntax is used with the iterable to recursively get response element on-demand. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The BigQuery project id for which data sources + * should be returned: `projects/{project_id}` or + * `projects/{project_id}/locations/{location_id}` + * @param {string[]} request.dataSourceIds + * When specified, only configurations of requested data sources are returned. + * @param {string} request.pageToken + * Pagination token, which can be used to request a specific page + * of `ListTransfersRequest` list results. For multiple-page + * results, `ListTransfersResponse` outputs + * a `next_page` token, which can be used as the + * `page_token` value to request the next page of list results. + * @param {number} request.pageSize + * Page size. The default page size is the maximum value of 1000 results. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that conforms to @link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols. 
+ */ listTransferConfigsAsync( - request?: protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsRequest, - options?: gax.CallOptions): - AsyncIterable{ + request?: protos.google.cloud.bigquery.datatransfer.v1.IListTransferConfigsRequest, + options?: gax.CallOptions + ): AsyncIterable< + protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig + > { request = request || {}; options = options || {}; options.otherArgs = options.otherArgs || {}; @@ -1457,100 +1823,121 @@ export class DataTransferServiceClient { options.otherArgs.headers[ 'x-goog-request-params' ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', + parent: request.parent || '', }); options = options || {}; const callSettings = new gax.CallSettings(options); this.initialize(); return this.descriptors.page.listTransferConfigs.asyncIterate( this.innerApiCalls['listTransferConfigs'] as GaxCall, - request as unknown as RequestType, + (request as unknown) as RequestType, callSettings - ) as AsyncIterable; + ) as AsyncIterable< + protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig + >; } listTransferRuns( - request: protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsRequest, - options?: gax.CallOptions): - Promise<[ - protos.google.cloud.bigquery.datatransfer.v1.ITransferRun[], - protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsRequest|null, - protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsResponse - ]>; + request: protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsRequest, + options?: gax.CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.ITransferRun[], + protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsRequest | null, + protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsResponse + ] + >; listTransferRuns( - request: protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsRequest, - options: gax.CallOptions, - callback: PaginationCallback< - 
protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsRequest, - protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsResponse|null|undefined, - protos.google.cloud.bigquery.datatransfer.v1.ITransferRun>): void; + request: protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsRequest, + options: gax.CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsRequest, + | protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsResponse + | null + | undefined, + protos.google.cloud.bigquery.datatransfer.v1.ITransferRun + > + ): void; listTransferRuns( - request: protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsRequest, - protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsResponse|null|undefined, - protos.google.cloud.bigquery.datatransfer.v1.ITransferRun>): void; -/** - * Returns information about running and completed jobs. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. Name of transfer configuration for which transfer runs should be retrieved. - * Format of transfer configuration resource name is: - * `projects/{project_id}/transferConfigs/{config_id}` or - * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. - * @param {number[]} request.states - * When specified, only transfer runs with requested states are returned. - * @param {string} request.pageToken - * Pagination token, which can be used to request a specific page - * of `ListTransferRunsRequest` list results. For multiple-page - * results, `ListTransferRunsResponse` outputs - * a `next_page` token, which can be used as the - * `page_token` value to request the next page of list results. - * @param {number} request.pageSize - * Page size. The default page size is the maximum value of 1000 results. 
- * @param {google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt} request.runAttempt - * Indicates how run attempts are to be pulled. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [TransferRun]{@link google.cloud.bigquery.datatransfer.v1.TransferRun}. - * The client library support auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * - * When autoPaginate: false is specified through options, the array has three elements. - * The first element is Array of [TransferRun]{@link google.cloud.bigquery.datatransfer.v1.TransferRun} that corresponds to - * the one page received from the API server. - * If the second element is not null it contains the request object of type [ListTransferRunsRequest]{@link google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest} - * that can be used to obtain the next page of the results. - * If it is null, the next page does not exist. - * The third element contains the raw response received from the API server. Its type is - * [ListTransferRunsResponse]{@link google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse}. - * - * The promise has a method named "cancel" which cancels the ongoing API call. - */ + request: protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsRequest, + | protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsResponse + | null + | undefined, + protos.google.cloud.bigquery.datatransfer.v1.ITransferRun + > + ): void; + /** + * Returns information about running and completed jobs. + * + * @param {Object} request + * The request object that will be sent. 
+ * @param {string} request.parent + * Required. Name of transfer configuration for which transfer runs should be retrieved. + * Format of transfer configuration resource name is: + * `projects/{project_id}/transferConfigs/{config_id}` or + * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. + * @param {number[]} request.states + * When specified, only transfer runs with requested states are returned. + * @param {string} request.pageToken + * Pagination token, which can be used to request a specific page + * of `ListTransferRunsRequest` list results. For multiple-page + * results, `ListTransferRunsResponse` outputs + * a `next_page` token, which can be used as the + * `page_token` value to request the next page of list results. + * @param {number} request.pageSize + * Page size. The default page size is the maximum value of 1000 results. + * @param {google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt} request.runAttempt + * Indicates how run attempts are to be pulled. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [TransferRun]{@link google.cloud.bigquery.datatransfer.v1.TransferRun}. + * The client library support auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * + * When autoPaginate: false is specified through options, the array has three elements. + * The first element is Array of [TransferRun]{@link google.cloud.bigquery.datatransfer.v1.TransferRun} that corresponds to + * the one page received from the API server. 
+ * If the second element is not null it contains the request object of type [ListTransferRunsRequest]{@link google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest} + * that can be used to obtain the next page of the results. + * If it is null, the next page does not exist. + * The third element contains the raw response received from the API server. Its type is + * [ListTransferRunsResponse]{@link google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse}. + * + * The promise has a method named "cancel" which cancels the ongoing API call. + */ listTransferRuns( - request: protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsRequest, - optionsOrCallback?: gax.CallOptions|PaginationCallback< - protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsRequest, - protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsResponse|null|undefined, - protos.google.cloud.bigquery.datatransfer.v1.ITransferRun>, - callback?: PaginationCallback< + request: protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsRequest, + optionsOrCallback?: + | gax.CallOptions + | PaginationCallback< protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsRequest, - protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsResponse|null|undefined, - protos.google.cloud.bigquery.datatransfer.v1.ITransferRun>): - Promise<[ - protos.google.cloud.bigquery.datatransfer.v1.ITransferRun[], - protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsRequest|null, - protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsResponse - ]>|void { + | protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsResponse + | null + | undefined, + protos.google.cloud.bigquery.datatransfer.v1.ITransferRun + >, + callback?: PaginationCallback< + protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsRequest, + | protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsResponse + | null + | undefined, + 
protos.google.cloud.bigquery.datatransfer.v1.ITransferRun + > + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.ITransferRun[], + protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsRequest | null, + protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsResponse + ] + > | void { request = request || {}; let options: gax.CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; - } - else { + } else { options = optionsOrCallback as gax.CallOptions; } options = options || {}; @@ -1559,53 +1946,53 @@ export class DataTransferServiceClient { options.otherArgs.headers[ 'x-goog-request-params' ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', + parent: request.parent || '', }); this.initialize(); return this.innerApiCalls.listTransferRuns(request, options, callback); } -/** - * Equivalent to {@link listTransferRuns}, but returns a NodeJS Stream object. - * - * This fetches the paged responses for {@link listTransferRuns} continuously - * and invokes the callback registered for 'data' event for each element in the - * responses. - * - * The returned object has 'end' method when no more elements are required. - * - * autoPaginate option will be ignored. - * - * @see {@link https://nodejs.org/api/stream.html} - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. Name of transfer configuration for which transfer runs should be retrieved. - * Format of transfer configuration resource name is: - * `projects/{project_id}/transferConfigs/{config_id}` or - * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. - * @param {number[]} request.states - * When specified, only transfer runs with requested states are returned. 
- * @param {string} request.pageToken - * Pagination token, which can be used to request a specific page - * of `ListTransferRunsRequest` list results. For multiple-page - * results, `ListTransferRunsResponse` outputs - * a `next_page` token, which can be used as the - * `page_token` value to request the next page of list results. - * @param {number} request.pageSize - * Page size. The default page size is the maximum value of 1000 results. - * @param {google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt} request.runAttempt - * Indicates how run attempts are to be pulled. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [TransferRun]{@link google.cloud.bigquery.datatransfer.v1.TransferRun} on 'data' event. - */ + /** + * Equivalent to {@link listTransferRuns}, but returns a NodeJS Stream object. + * + * This fetches the paged responses for {@link listTransferRuns} continuously + * and invokes the callback registered for 'data' event for each element in the + * responses. + * + * The returned object has 'end' method when no more elements are required. + * + * autoPaginate option will be ignored. + * + * @see {@link https://nodejs.org/api/stream.html} + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. Name of transfer configuration for which transfer runs should be retrieved. + * Format of transfer configuration resource name is: + * `projects/{project_id}/transferConfigs/{config_id}` or + * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. + * @param {number[]} request.states + * When specified, only transfer runs with requested states are returned. 
+ * @param {string} request.pageToken + * Pagination token, which can be used to request a specific page + * of `ListTransferRunsRequest` list results. For multiple-page + * results, `ListTransferRunsResponse` outputs + * a `next_page` token, which can be used as the + * `page_token` value to request the next page of list results. + * @param {number} request.pageSize + * Page size. The default page size is the maximum value of 1000 results. + * @param {google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt} request.runAttempt + * Indicates how run attempts are to be pulled. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [TransferRun]{@link google.cloud.bigquery.datatransfer.v1.TransferRun} on 'data' event. + */ listTransferRunsStream( - request?: protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsRequest, - options?: gax.CallOptions): - Transform{ + request?: protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsRequest, + options?: gax.CallOptions + ): Transform { request = request || {}; options = options || {}; options.otherArgs = options.otherArgs || {}; @@ -1613,7 +2000,7 @@ export class DataTransferServiceClient { options.otherArgs.headers[ 'x-goog-request-params' ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', + parent: request.parent || '', }); const callSettings = new gax.CallSettings(options); this.initialize(); @@ -1624,39 +2011,39 @@ export class DataTransferServiceClient { ); } -/** - * Equivalent to {@link listTransferRuns}, but returns an iterable object. - * - * for-await-of syntax is used with the iterable to recursively get response element on-demand. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. 
Name of transfer configuration for which transfer runs should be retrieved. - * Format of transfer configuration resource name is: - * `projects/{project_id}/transferConfigs/{config_id}` or - * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. - * @param {number[]} request.states - * When specified, only transfer runs with requested states are returned. - * @param {string} request.pageToken - * Pagination token, which can be used to request a specific page - * of `ListTransferRunsRequest` list results. For multiple-page - * results, `ListTransferRunsResponse` outputs - * a `next_page` token, which can be used as the - * `page_token` value to request the next page of list results. - * @param {number} request.pageSize - * Page size. The default page size is the maximum value of 1000 results. - * @param {google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt} request.runAttempt - * Indicates how run attempts are to be pulled. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that conforms to @link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols. - */ + /** + * Equivalent to {@link listTransferRuns}, but returns an iterable object. + * + * for-await-of syntax is used with the iterable to recursively get response element on-demand. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. Name of transfer configuration for which transfer runs should be retrieved. + * Format of transfer configuration resource name is: + * `projects/{project_id}/transferConfigs/{config_id}` or + * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}`. + * @param {number[]} request.states + * When specified, only transfer runs with requested states are returned. 
+ * @param {string} request.pageToken + * Pagination token, which can be used to request a specific page + * of `ListTransferRunsRequest` list results. For multiple-page + * results, `ListTransferRunsResponse` outputs + * a `next_page` token, which can be used as the + * `page_token` value to request the next page of list results. + * @param {number} request.pageSize + * Page size. The default page size is the maximum value of 1000 results. + * @param {google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt} request.runAttempt + * Indicates how run attempts are to be pulled. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that conforms to @link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols. + */ listTransferRunsAsync( - request?: protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsRequest, - options?: gax.CallOptions): - AsyncIterable{ + request?: protos.google.cloud.bigquery.datatransfer.v1.IListTransferRunsRequest, + options?: gax.CallOptions + ): AsyncIterable { request = request || {}; options = options || {}; options.otherArgs = options.otherArgs || {}; @@ -1664,98 +2051,119 @@ export class DataTransferServiceClient { options.otherArgs.headers[ 'x-goog-request-params' ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', + parent: request.parent || '', }); options = options || {}; const callSettings = new gax.CallSettings(options); this.initialize(); return this.descriptors.page.listTransferRuns.asyncIterate( this.innerApiCalls['listTransferRuns'] as GaxCall, - request as unknown as RequestType, + (request as unknown) as RequestType, callSettings - ) as AsyncIterable; + ) as AsyncIterable< + protos.google.cloud.bigquery.datatransfer.v1.ITransferRun + >; } listTransferLogs( - request: 
protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsRequest, - options?: gax.CallOptions): - Promise<[ - protos.google.cloud.bigquery.datatransfer.v1.ITransferMessage[], - protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsRequest|null, - protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsResponse - ]>; + request: protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsRequest, + options?: gax.CallOptions + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.ITransferMessage[], + protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsRequest | null, + protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsResponse + ] + >; listTransferLogs( - request: protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsRequest, - options: gax.CallOptions, - callback: PaginationCallback< - protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsRequest, - protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsResponse|null|undefined, - protos.google.cloud.bigquery.datatransfer.v1.ITransferMessage>): void; + request: protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsRequest, + options: gax.CallOptions, + callback: PaginationCallback< + protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsRequest, + | protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsResponse + | null + | undefined, + protos.google.cloud.bigquery.datatransfer.v1.ITransferMessage + > + ): void; listTransferLogs( - request: protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsRequest, - callback: PaginationCallback< - protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsRequest, - protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsResponse|null|undefined, - protos.google.cloud.bigquery.datatransfer.v1.ITransferMessage>): void; -/** - * Returns user facing log messages for the data transfer run. - * - * @param {Object} request - * The request object that will be sent. 
- * @param {string} request.parent - * Required. Transfer run name in the form: - * `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or - * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` - * @param {string} request.pageToken - * Pagination token, which can be used to request a specific page - * of `ListTransferLogsRequest` list results. For multiple-page - * results, `ListTransferLogsResponse` outputs - * a `next_page` token, which can be used as the - * `page_token` value to request the next page of list results. - * @param {number} request.pageSize - * Page size. The default page size is the maximum value of 1000 results. - * @param {number[]} request.messageTypes - * Message types to return. If not populated - INFO, WARNING and ERROR - * messages are returned. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [TransferMessage]{@link google.cloud.bigquery.datatransfer.v1.TransferMessage}. - * The client library support auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * - * When autoPaginate: false is specified through options, the array has three elements. - * The first element is Array of [TransferMessage]{@link google.cloud.bigquery.datatransfer.v1.TransferMessage} that corresponds to - * the one page received from the API server. - * If the second element is not null it contains the request object of type [ListTransferLogsRequest]{@link google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest} - * that can be used to obtain the next page of the results. - * If it is null, the next page does not exist. - * The third element contains the raw response received from the API server. 
Its type is - * [ListTransferLogsResponse]{@link google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse}. - * - * The promise has a method named "cancel" which cancels the ongoing API call. - */ + request: protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsRequest, + callback: PaginationCallback< + protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsRequest, + | protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsResponse + | null + | undefined, + protos.google.cloud.bigquery.datatransfer.v1.ITransferMessage + > + ): void; + /** + * Returns user facing log messages for the data transfer run. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. Transfer run name in the form: + * `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or + * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` + * @param {string} request.pageToken + * Pagination token, which can be used to request a specific page + * of `ListTransferLogsRequest` list results. For multiple-page + * results, `ListTransferLogsResponse` outputs + * a `next_page` token, which can be used as the + * `page_token` value to request the next page of list results. + * @param {number} request.pageSize + * Page size. The default page size is the maximum value of 1000 results. + * @param {number[]} request.messageTypes + * Message types to return. If not populated - INFO, WARNING and ERROR + * messages are returned. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [TransferMessage]{@link google.cloud.bigquery.datatransfer.v1.TransferMessage}. 
+ * The client library support auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * + * When autoPaginate: false is specified through options, the array has three elements. + * The first element is Array of [TransferMessage]{@link google.cloud.bigquery.datatransfer.v1.TransferMessage} that corresponds to + * the one page received from the API server. + * If the second element is not null it contains the request object of type [ListTransferLogsRequest]{@link google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest} + * that can be used to obtain the next page of the results. + * If it is null, the next page does not exist. + * The third element contains the raw response received from the API server. Its type is + * [ListTransferLogsResponse]{@link google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse}. + * + * The promise has a method named "cancel" which cancels the ongoing API call. + */ listTransferLogs( - request: protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsRequest, - optionsOrCallback?: gax.CallOptions|PaginationCallback< - protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsRequest, - protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsResponse|null|undefined, - protos.google.cloud.bigquery.datatransfer.v1.ITransferMessage>, - callback?: PaginationCallback< + request: protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsRequest, + optionsOrCallback?: + | gax.CallOptions + | PaginationCallback< protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsRequest, - protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsResponse|null|undefined, - protos.google.cloud.bigquery.datatransfer.v1.ITransferMessage>): - Promise<[ - protos.google.cloud.bigquery.datatransfer.v1.ITransferMessage[], - protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsRequest|null, - 
protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsResponse - ]>|void { + | protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsResponse + | null + | undefined, + protos.google.cloud.bigquery.datatransfer.v1.ITransferMessage + >, + callback?: PaginationCallback< + protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsRequest, + | protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsResponse + | null + | undefined, + protos.google.cloud.bigquery.datatransfer.v1.ITransferMessage + > + ): Promise< + [ + protos.google.cloud.bigquery.datatransfer.v1.ITransferMessage[], + protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsRequest | null, + protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsResponse + ] + > | void { request = request || {}; let options: gax.CallOptions; if (typeof optionsOrCallback === 'function' && callback === undefined) { callback = optionsOrCallback; options = {}; - } - else { + } else { options = optionsOrCallback as gax.CallOptions; } options = options || {}; @@ -1764,51 +2172,51 @@ export class DataTransferServiceClient { options.otherArgs.headers[ 'x-goog-request-params' ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', + parent: request.parent || '', }); this.initialize(); return this.innerApiCalls.listTransferLogs(request, options, callback); } -/** - * Equivalent to {@link listTransferLogs}, but returns a NodeJS Stream object. - * - * This fetches the paged responses for {@link listTransferLogs} continuously - * and invokes the callback registered for 'data' event for each element in the - * responses. - * - * The returned object has 'end' method when no more elements are required. - * - * autoPaginate option will be ignored. - * - * @see {@link https://nodejs.org/api/stream.html} - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. 
Transfer run name in the form: - * `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or - * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` - * @param {string} request.pageToken - * Pagination token, which can be used to request a specific page - * of `ListTransferLogsRequest` list results. For multiple-page - * results, `ListTransferLogsResponse` outputs - * a `next_page` token, which can be used as the - * `page_token` value to request the next page of list results. - * @param {number} request.pageSize - * Page size. The default page size is the maximum value of 1000 results. - * @param {number[]} request.messageTypes - * Message types to return. If not populated - INFO, WARNING and ERROR - * messages are returned. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [TransferMessage]{@link google.cloud.bigquery.datatransfer.v1.TransferMessage} on 'data' event. - */ + /** + * Equivalent to {@link listTransferLogs}, but returns a NodeJS Stream object. + * + * This fetches the paged responses for {@link listTransferLogs} continuously + * and invokes the callback registered for 'data' event for each element in the + * responses. + * + * The returned object has 'end' method when no more elements are required. + * + * autoPaginate option will be ignored. + * + * @see {@link https://nodejs.org/api/stream.html} + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. 
Transfer run name in the form: + * `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or + * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` + * @param {string} request.pageToken + * Pagination token, which can be used to request a specific page + * of `ListTransferLogsRequest` list results. For multiple-page + * results, `ListTransferLogsResponse` outputs + * a `next_page` token, which can be used as the + * `page_token` value to request the next page of list results. + * @param {number} request.pageSize + * Page size. The default page size is the maximum value of 1000 results. + * @param {number[]} request.messageTypes + * Message types to return. If not populated - INFO, WARNING and ERROR + * messages are returned. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [TransferMessage]{@link google.cloud.bigquery.datatransfer.v1.TransferMessage} on 'data' event. + */ listTransferLogsStream( - request?: protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsRequest, - options?: gax.CallOptions): - Transform{ + request?: protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsRequest, + options?: gax.CallOptions + ): Transform { request = request || {}; options = options || {}; options.otherArgs = options.otherArgs || {}; @@ -1816,7 +2224,7 @@ export class DataTransferServiceClient { options.otherArgs.headers[ 'x-goog-request-params' ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', + parent: request.parent || '', }); const callSettings = new gax.CallSettings(options); this.initialize(); @@ -1827,37 +2235,39 @@ export class DataTransferServiceClient { ); } -/** - * Equivalent to {@link listTransferLogs}, but returns an iterable object. 
- * - * for-await-of syntax is used with the iterable to recursively get response element on-demand. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. Transfer run name in the form: - * `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or - * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` - * @param {string} request.pageToken - * Pagination token, which can be used to request a specific page - * of `ListTransferLogsRequest` list results. For multiple-page - * results, `ListTransferLogsResponse` outputs - * a `next_page` token, which can be used as the - * `page_token` value to request the next page of list results. - * @param {number} request.pageSize - * Page size. The default page size is the maximum value of 1000 results. - * @param {number[]} request.messageTypes - * Message types to return. If not populated - INFO, WARNING and ERROR - * messages are returned. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that conforms to @link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols. - */ + /** + * Equivalent to {@link listTransferLogs}, but returns an iterable object. + * + * for-await-of syntax is used with the iterable to recursively get response element on-demand. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. Transfer run name in the form: + * `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` or + * `projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}` + * @param {string} request.pageToken + * Pagination token, which can be used to request a specific page + * of `ListTransferLogsRequest` list results. 
For multiple-page + * results, `ListTransferLogsResponse` outputs + * a `next_page` token, which can be used as the + * `page_token` value to request the next page of list results. + * @param {number} request.pageSize + * Page size. The default page size is the maximum value of 1000 results. + * @param {number[]} request.messageTypes + * Message types to return. If not populated - INFO, WARNING and ERROR + * messages are returned. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that conforms to @link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols. + */ listTransferLogsAsync( - request?: protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsRequest, - options?: gax.CallOptions): - AsyncIterable{ + request?: protos.google.cloud.bigquery.datatransfer.v1.IListTransferLogsRequest, + options?: gax.CallOptions + ): AsyncIterable< + protos.google.cloud.bigquery.datatransfer.v1.ITransferMessage + > { request = request || {}; options = options || {}; options.otherArgs = options.otherArgs || {}; @@ -1865,16 +2275,18 @@ export class DataTransferServiceClient { options.otherArgs.headers[ 'x-goog-request-params' ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', + parent: request.parent || '', }); options = options || {}; const callSettings = new gax.CallSettings(options); this.initialize(); return this.descriptors.page.listTransferLogs.asyncIterate( this.innerApiCalls['listTransferLogs'] as GaxCall, - request as unknown as RequestType, + (request as unknown) as RequestType, callSettings - ) as AsyncIterable; + ) as AsyncIterable< + protos.google.cloud.bigquery.datatransfer.v1.ITransferMessage + >; } // -------------------- // -- Path templates -- @@ -1886,7 +2298,7 @@ export class DataTransferServiceClient { * @param {string} project * @returns {string} 
Resource name string. */ - projectPath(project:string) { + projectPath(project: string) { return this.pathTemplates.projectPathTemplate.render({ project: project, }); @@ -1910,7 +2322,7 @@ export class DataTransferServiceClient { * @param {string} data_source * @returns {string} Resource name string. */ - projectDataSourcePath(project:string,dataSource:string) { + projectDataSourcePath(project: string, dataSource: string) { return this.pathTemplates.projectDataSourcePathTemplate.render({ project: project, data_source: dataSource, @@ -1925,7 +2337,9 @@ export class DataTransferServiceClient { * @returns {string} A string representing the project. */ matchProjectFromProjectDataSourceName(projectDataSourceName: string) { - return this.pathTemplates.projectDataSourcePathTemplate.match(projectDataSourceName).project; + return this.pathTemplates.projectDataSourcePathTemplate.match( + projectDataSourceName + ).project; } /** @@ -1936,7 +2350,9 @@ export class DataTransferServiceClient { * @returns {string} A string representing the data_source. */ matchDataSourceFromProjectDataSourceName(projectDataSourceName: string) { - return this.pathTemplates.projectDataSourcePathTemplate.match(projectDataSourceName).data_source; + return this.pathTemplates.projectDataSourcePathTemplate.match( + projectDataSourceName + ).data_source; } /** @@ -1947,7 +2363,11 @@ export class DataTransferServiceClient { * @param {string} data_source * @returns {string} Resource name string. */ - projectLocationDataSourcePath(project:string,location:string,dataSource:string) { + projectLocationDataSourcePath( + project: string, + location: string, + dataSource: string + ) { return this.pathTemplates.projectLocationDataSourcePathTemplate.render({ project: project, location: location, @@ -1962,8 +2382,12 @@ export class DataTransferServiceClient { * A fully-qualified path representing project_location_data_source resource. * @returns {string} A string representing the project. 
*/ - matchProjectFromProjectLocationDataSourceName(projectLocationDataSourceName: string) { - return this.pathTemplates.projectLocationDataSourcePathTemplate.match(projectLocationDataSourceName).project; + matchProjectFromProjectLocationDataSourceName( + projectLocationDataSourceName: string + ) { + return this.pathTemplates.projectLocationDataSourcePathTemplate.match( + projectLocationDataSourceName + ).project; } /** @@ -1973,8 +2397,12 @@ export class DataTransferServiceClient { * A fully-qualified path representing project_location_data_source resource. * @returns {string} A string representing the location. */ - matchLocationFromProjectLocationDataSourceName(projectLocationDataSourceName: string) { - return this.pathTemplates.projectLocationDataSourcePathTemplate.match(projectLocationDataSourceName).location; + matchLocationFromProjectLocationDataSourceName( + projectLocationDataSourceName: string + ) { + return this.pathTemplates.projectLocationDataSourcePathTemplate.match( + projectLocationDataSourceName + ).location; } /** @@ -1984,8 +2412,12 @@ export class DataTransferServiceClient { * A fully-qualified path representing project_location_data_source resource. * @returns {string} A string representing the data_source. */ - matchDataSourceFromProjectLocationDataSourceName(projectLocationDataSourceName: string) { - return this.pathTemplates.projectLocationDataSourcePathTemplate.match(projectLocationDataSourceName).data_source; + matchDataSourceFromProjectLocationDataSourceName( + projectLocationDataSourceName: string + ) { + return this.pathTemplates.projectLocationDataSourcePathTemplate.match( + projectLocationDataSourceName + ).data_source; } /** @@ -1996,7 +2428,11 @@ export class DataTransferServiceClient { * @param {string} transfer_config * @returns {string} Resource name string. 
*/ - projectLocationTransferConfigPath(project:string,location:string,transferConfig:string) { + projectLocationTransferConfigPath( + project: string, + location: string, + transferConfig: string + ) { return this.pathTemplates.projectLocationTransferConfigPathTemplate.render({ project: project, location: location, @@ -2011,8 +2447,12 @@ export class DataTransferServiceClient { * A fully-qualified path representing project_location_transfer_config resource. * @returns {string} A string representing the project. */ - matchProjectFromProjectLocationTransferConfigName(projectLocationTransferConfigName: string) { - return this.pathTemplates.projectLocationTransferConfigPathTemplate.match(projectLocationTransferConfigName).project; + matchProjectFromProjectLocationTransferConfigName( + projectLocationTransferConfigName: string + ) { + return this.pathTemplates.projectLocationTransferConfigPathTemplate.match( + projectLocationTransferConfigName + ).project; } /** @@ -2022,8 +2462,12 @@ export class DataTransferServiceClient { * A fully-qualified path representing project_location_transfer_config resource. * @returns {string} A string representing the location. */ - matchLocationFromProjectLocationTransferConfigName(projectLocationTransferConfigName: string) { - return this.pathTemplates.projectLocationTransferConfigPathTemplate.match(projectLocationTransferConfigName).location; + matchLocationFromProjectLocationTransferConfigName( + projectLocationTransferConfigName: string + ) { + return this.pathTemplates.projectLocationTransferConfigPathTemplate.match( + projectLocationTransferConfigName + ).location; } /** @@ -2033,8 +2477,12 @@ export class DataTransferServiceClient { * A fully-qualified path representing project_location_transfer_config resource. * @returns {string} A string representing the transfer_config. 
*/ - matchTransferConfigFromProjectLocationTransferConfigName(projectLocationTransferConfigName: string) { - return this.pathTemplates.projectLocationTransferConfigPathTemplate.match(projectLocationTransferConfigName).transfer_config; + matchTransferConfigFromProjectLocationTransferConfigName( + projectLocationTransferConfigName: string + ) { + return this.pathTemplates.projectLocationTransferConfigPathTemplate.match( + projectLocationTransferConfigName + ).transfer_config; } /** @@ -2046,13 +2494,20 @@ export class DataTransferServiceClient { * @param {string} run * @returns {string} Resource name string. */ - projectLocationTransferConfigRunPath(project:string,location:string,transferConfig:string,run:string) { - return this.pathTemplates.projectLocationTransferConfigRunPathTemplate.render({ - project: project, - location: location, - transfer_config: transferConfig, - run: run, - }); + projectLocationTransferConfigRunPath( + project: string, + location: string, + transferConfig: string, + run: string + ) { + return this.pathTemplates.projectLocationTransferConfigRunPathTemplate.render( + { + project: project, + location: location, + transfer_config: transferConfig, + run: run, + } + ); } /** @@ -2062,8 +2517,12 @@ export class DataTransferServiceClient { * A fully-qualified path representing project_location_transfer_config_run resource. * @returns {string} A string representing the project. 
*/ - matchProjectFromProjectLocationTransferConfigRunName(projectLocationTransferConfigRunName: string) { - return this.pathTemplates.projectLocationTransferConfigRunPathTemplate.match(projectLocationTransferConfigRunName).project; + matchProjectFromProjectLocationTransferConfigRunName( + projectLocationTransferConfigRunName: string + ) { + return this.pathTemplates.projectLocationTransferConfigRunPathTemplate.match( + projectLocationTransferConfigRunName + ).project; } /** @@ -2073,8 +2532,12 @@ export class DataTransferServiceClient { * A fully-qualified path representing project_location_transfer_config_run resource. * @returns {string} A string representing the location. */ - matchLocationFromProjectLocationTransferConfigRunName(projectLocationTransferConfigRunName: string) { - return this.pathTemplates.projectLocationTransferConfigRunPathTemplate.match(projectLocationTransferConfigRunName).location; + matchLocationFromProjectLocationTransferConfigRunName( + projectLocationTransferConfigRunName: string + ) { + return this.pathTemplates.projectLocationTransferConfigRunPathTemplate.match( + projectLocationTransferConfigRunName + ).location; } /** @@ -2084,8 +2547,12 @@ export class DataTransferServiceClient { * A fully-qualified path representing project_location_transfer_config_run resource. * @returns {string} A string representing the transfer_config. 
*/ - matchTransferConfigFromProjectLocationTransferConfigRunName(projectLocationTransferConfigRunName: string) { - return this.pathTemplates.projectLocationTransferConfigRunPathTemplate.match(projectLocationTransferConfigRunName).transfer_config; + matchTransferConfigFromProjectLocationTransferConfigRunName( + projectLocationTransferConfigRunName: string + ) { + return this.pathTemplates.projectLocationTransferConfigRunPathTemplate.match( + projectLocationTransferConfigRunName + ).transfer_config; } /** @@ -2095,8 +2562,12 @@ export class DataTransferServiceClient { * A fully-qualified path representing project_location_transfer_config_run resource. * @returns {string} A string representing the run. */ - matchRunFromProjectLocationTransferConfigRunName(projectLocationTransferConfigRunName: string) { - return this.pathTemplates.projectLocationTransferConfigRunPathTemplate.match(projectLocationTransferConfigRunName).run; + matchRunFromProjectLocationTransferConfigRunName( + projectLocationTransferConfigRunName: string + ) { + return this.pathTemplates.projectLocationTransferConfigRunPathTemplate.match( + projectLocationTransferConfigRunName + ).run; } /** @@ -2106,7 +2577,7 @@ export class DataTransferServiceClient { * @param {string} transfer_config * @returns {string} Resource name string. */ - projectTransferConfigPath(project:string,transferConfig:string) { + projectTransferConfigPath(project: string, transferConfig: string) { return this.pathTemplates.projectTransferConfigPathTemplate.render({ project: project, transfer_config: transferConfig, @@ -2121,7 +2592,9 @@ export class DataTransferServiceClient { * @returns {string} A string representing the project. 
*/ matchProjectFromProjectTransferConfigName(projectTransferConfigName: string) { - return this.pathTemplates.projectTransferConfigPathTemplate.match(projectTransferConfigName).project; + return this.pathTemplates.projectTransferConfigPathTemplate.match( + projectTransferConfigName + ).project; } /** @@ -2131,8 +2604,12 @@ export class DataTransferServiceClient { * A fully-qualified path representing project_transfer_config resource. * @returns {string} A string representing the transfer_config. */ - matchTransferConfigFromProjectTransferConfigName(projectTransferConfigName: string) { - return this.pathTemplates.projectTransferConfigPathTemplate.match(projectTransferConfigName).transfer_config; + matchTransferConfigFromProjectTransferConfigName( + projectTransferConfigName: string + ) { + return this.pathTemplates.projectTransferConfigPathTemplate.match( + projectTransferConfigName + ).transfer_config; } /** @@ -2143,7 +2620,11 @@ export class DataTransferServiceClient { * @param {string} run * @returns {string} Resource name string. */ - projectTransferConfigRunPath(project:string,transferConfig:string,run:string) { + projectTransferConfigRunPath( + project: string, + transferConfig: string, + run: string + ) { return this.pathTemplates.projectTransferConfigRunPathTemplate.render({ project: project, transfer_config: transferConfig, @@ -2158,8 +2639,12 @@ export class DataTransferServiceClient { * A fully-qualified path representing project_transfer_config_run resource. * @returns {string} A string representing the project. 
*/ - matchProjectFromProjectTransferConfigRunName(projectTransferConfigRunName: string) { - return this.pathTemplates.projectTransferConfigRunPathTemplate.match(projectTransferConfigRunName).project; + matchProjectFromProjectTransferConfigRunName( + projectTransferConfigRunName: string + ) { + return this.pathTemplates.projectTransferConfigRunPathTemplate.match( + projectTransferConfigRunName + ).project; } /** @@ -2169,8 +2654,12 @@ export class DataTransferServiceClient { * A fully-qualified path representing project_transfer_config_run resource. * @returns {string} A string representing the transfer_config. */ - matchTransferConfigFromProjectTransferConfigRunName(projectTransferConfigRunName: string) { - return this.pathTemplates.projectTransferConfigRunPathTemplate.match(projectTransferConfigRunName).transfer_config; + matchTransferConfigFromProjectTransferConfigRunName( + projectTransferConfigRunName: string + ) { + return this.pathTemplates.projectTransferConfigRunPathTemplate.match( + projectTransferConfigRunName + ).transfer_config; } /** @@ -2180,8 +2669,12 @@ export class DataTransferServiceClient { * A fully-qualified path representing project_transfer_config_run resource. * @returns {string} A string representing the run. 
*/ - matchRunFromProjectTransferConfigRunName(projectTransferConfigRunName: string) { - return this.pathTemplates.projectTransferConfigRunPathTemplate.match(projectTransferConfigRunName).run; + matchRunFromProjectTransferConfigRunName( + projectTransferConfigRunName: string + ) { + return this.pathTemplates.projectTransferConfigRunPathTemplate.match( + projectTransferConfigRunName + ).run; } /** diff --git a/packages/google-cloud-bigquery-datatransfer/synth.metadata b/packages/google-cloud-bigquery-datatransfer/synth.metadata index b6b3817fdd9..65e6cf02c27 100644 --- a/packages/google-cloud-bigquery-datatransfer/synth.metadata +++ b/packages/google-cloud-bigquery-datatransfer/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/nodejs-bigquery-data-transfer.git", - "sha": "47f9a79f24eaf23d715449597754975b28c15595" + "sha": "20209c1b2a060eebe934629cdd99bbb4d0f64e7b" } }, { diff --git a/packages/google-cloud-bigquery-datatransfer/system-test/fixtures/sample/src/index.js b/packages/google-cloud-bigquery-datatransfer/system-test/fixtures/sample/src/index.js index b1ac76b2c7c..b2b694466b5 100644 --- a/packages/google-cloud-bigquery-datatransfer/system-test/fixtures/sample/src/index.js +++ b/packages/google-cloud-bigquery-datatransfer/system-test/fixtures/sample/src/index.js @@ -16,7 +16,6 @@ // ** https://github.com/googleapis/gapic-generator-typescript ** // ** All changes to this file may be overwritten. 
** - /* eslint-disable node/no-missing-require, no-unused-vars */ const datatransfer = require('@google-cloud/bigquery-data-transfer'); diff --git a/packages/google-cloud-bigquery-datatransfer/system-test/install.ts b/packages/google-cloud-bigquery-datatransfer/system-test/install.ts index 5e4ed636481..4c1ba3eb79a 100644 --- a/packages/google-cloud-bigquery-datatransfer/system-test/install.ts +++ b/packages/google-cloud-bigquery-datatransfer/system-test/install.ts @@ -16,34 +16,36 @@ // ** https://github.com/googleapis/gapic-generator-typescript ** // ** All changes to this file may be overwritten. ** -import { packNTest } from 'pack-n-play'; -import { readFileSync } from 'fs'; -import { describe, it } from 'mocha'; +import {packNTest} from 'pack-n-play'; +import {readFileSync} from 'fs'; +import {describe, it} from 'mocha'; describe('typescript consumer tests', () => { - - it('should have correct type signature for typescript users', async function() { + it('should have correct type signature for typescript users', async function () { this.timeout(300000); const options = { - packageDir: process.cwd(), // path to your module. + packageDir: process.cwd(), // path to your module. sample: { description: 'typescript based user can use the type definitions', - ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() - } + ts: readFileSync( + './system-test/fixtures/sample/src/index.ts' + ).toString(), + }, }; - await packNTest(options); // will throw upon error. + await packNTest(options); // will throw upon error. }); - it('should have correct type signature for javascript users', async function() { + it('should have correct type signature for javascript users', async function () { this.timeout(300000); const options = { - packageDir: process.cwd(), // path to your module. + packageDir: process.cwd(), // path to your module. 
sample: { description: 'typescript based user can use the type definitions', - ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() - } + ts: readFileSync( + './system-test/fixtures/sample/src/index.js' + ).toString(), + }, }; - await packNTest(options); // will throw upon error. + await packNTest(options); // will throw upon error. }); - }); diff --git a/packages/google-cloud-bigquery-datatransfer/test/gapic_data_transfer_service_v1.ts b/packages/google-cloud-bigquery-datatransfer/test/gapic_data_transfer_service_v1.ts index 8c5dd7398d0..5e9280ea405 100644 --- a/packages/google-cloud-bigquery-datatransfer/test/gapic_data_transfer_service_v1.ts +++ b/packages/google-cloud-bigquery-datatransfer/test/gapic_data_transfer_service_v1.ts @@ -20,7 +20,7 @@ import * as protos from '../protos/protos'; import * as assert from 'assert'; import * as sinon from 'sinon'; import {SinonStub} from 'sinon'; -import { describe, it } from 'mocha'; +import {describe, it} from 'mocha'; import * as datatransferserviceModule from '../src'; import {PassThrough} from 'stream'; @@ -28,2172 +28,3219 @@ import {PassThrough} from 'stream'; import {protobuf} from 'google-gax'; function generateSampleMessage(instance: T) { - const filledObject = (instance.constructor as typeof protobuf.Message) - .toObject(instance as protobuf.Message, {defaults: true}); - return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; + const filledObject = (instance.constructor as typeof protobuf.Message).toObject( + instance as protobuf.Message, + {defaults: true} + ); + return (instance.constructor as typeof protobuf.Message).fromObject( + filledObject + ) as T; } function stubSimpleCall(response?: ResponseType, error?: Error) { - return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); + return error + ? 
sinon.stub().rejects(error) + : sinon.stub().resolves([response]); } -function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { - return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); +function stubSimpleCallWithCallback( + response?: ResponseType, + error?: Error +) { + return error + ? sinon.stub().callsArgWith(2, error) + : sinon.stub().callsArgWith(2, null, response); } -function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { - const pagingStub = sinon.stub(); - if (responses) { - for (let i = 0; i < responses.length; ++i) { - pagingStub.onCall(i).callsArgWith(2, null, responses[i]); - } +function stubPageStreamingCall( + responses?: ResponseType[], + error?: Error +) { + const pagingStub = sinon.stub(); + if (responses) { + for (let i = 0; i < responses.length; ++i) { + pagingStub.onCall(i).callsArgWith(2, null, responses[i]); } - const transformStub = error ? sinon.stub().callsArgWith(2, error) : pagingStub; - const mockStream = new PassThrough({ - objectMode: true, - transform: transformStub, - }); - // trigger as many responses as needed - if (responses) { - for (let i = 0; i < responses.length; ++i) { - setImmediate(() => { mockStream.write({}); }); - } - setImmediate(() => { mockStream.end(); }); - } else { - setImmediate(() => { mockStream.write({}); }); - setImmediate(() => { mockStream.end(); }); + } + const transformStub = error + ? 
sinon.stub().callsArgWith(2, error) + : pagingStub; + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + // trigger as many responses as needed + if (responses) { + for (let i = 0; i < responses.length; ++i) { + setImmediate(() => { + mockStream.write({}); + }); } - return sinon.stub().returns(mockStream); + setImmediate(() => { + mockStream.end(); + }); + } else { + setImmediate(() => { + mockStream.write({}); + }); + setImmediate(() => { + mockStream.end(); + }); + } + return sinon.stub().returns(mockStream); } -function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { - let counter = 0; - const asyncIterable = { - [Symbol.asyncIterator]() { - return { - async next() { - if (error) { - return Promise.reject(error); - } - if (counter >= responses!.length) { - return Promise.resolve({done: true, value: undefined}); - } - return Promise.resolve({done: false, value: responses![counter++]}); - } - }; - } - }; - return sinon.stub().returns(asyncIterable); +function stubAsyncIterationCall( + responses?: ResponseType[], + error?: Error +) { + let counter = 0; + const asyncIterable = { + [Symbol.asyncIterator]() { + return { + async next() { + if (error) { + return Promise.reject(error); + } + if (counter >= responses!.length) { + return Promise.resolve({done: true, value: undefined}); + } + return Promise.resolve({done: false, value: responses![counter++]}); + }, + }; + }, + }; + return sinon.stub().returns(asyncIterable); } describe('v1.DataTransferServiceClient', () => { - it('has servicePath', () => { - const servicePath = datatransferserviceModule.v1.DataTransferServiceClient.servicePath; - assert(servicePath); + it('has servicePath', () => { + const servicePath = + datatransferserviceModule.v1.DataTransferServiceClient.servicePath; + assert(servicePath); + }); + + it('has apiEndpoint', () => { + const apiEndpoint = + datatransferserviceModule.v1.DataTransferServiceClient.apiEndpoint; + assert(apiEndpoint); 
+ }); + + it('has port', () => { + const port = datatransferserviceModule.v1.DataTransferServiceClient.port; + assert(port); + assert(typeof port === 'number'); + }); + + it('should create a client with no option', () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient({ + fallback: true, }); + assert(client); + }); - it('has apiEndpoint', () => { - const apiEndpoint = datatransferserviceModule.v1.DataTransferServiceClient.apiEndpoint; - assert(apiEndpoint); + it('has initialize method and supports deferred initialization', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', }); - - it('has port', () => { - const port = datatransferserviceModule.v1.DataTransferServiceClient.port; - assert(port); - assert(typeof port === 'number'); + assert.strictEqual(client.dataTransferServiceStub, undefined); + await client.initialize(); + assert(client.dataTransferServiceStub); + }); + + it('has close method', () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', }); - - it('should create a client with no option', () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient(); - assert(client); + client.close(); + }); + + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new datatransferserviceModule.v1.DataTransferServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', }); - - it('should create a client with gRPC fallback', () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - fallback: true, - }); - 
assert(client); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new datatransferserviceModule.v1.DataTransferServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', }); - - it('has initialize method and supports deferred initialization', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: { client_email: 'bogus', private_key: 'bogus' }, - projectId: 'bogus', - }); - assert.strictEqual(client.dataTransferServiceStub, undefined); - await client.initialize(); - assert(client.dataTransferServiceStub); + client.auth.getProjectId = sinon + .stub() + .callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error | null, projectId?: string | null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); }); - - it('has close method', () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: { client_email: 'bogus', private_key: 'bogus' }, - projectId: 'bogus', - }); - client.close(); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + + describe('getDataSource', () => { + it('invokes getDataSource without error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const 
expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.DataSource() + ); + client.innerApiCalls.getDataSource = stubSimpleCall(expectedResponse); + const [response] = await client.getDataSource(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.getDataSource as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); }); - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: { client_email: 'bogus', private_key: 'bogus' }, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + it('invokes getDataSource without error using callback', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.DataSource() + ); + client.innerApiCalls.getDataSource = stubSimpleCallWithCallback( + expectedResponse + ); + const promise = new Promise((resolve, reject) => { + client.getDataSource( + request, + ( + err?: Error | null, + result?: 
protos.google.cloud.bigquery.datatransfer.v1.IDataSource | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.getDataSource as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); }); - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: { client_email: 'bogus', private_key: 'bogus' }, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error|null, projectId?: string|null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } - }); - }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); - }); - - describe('getDataSource', () => { - it('invokes getDataSource without error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.DataSource()); - client.innerApiCalls.getDataSource = stubSimpleCall(expectedResponse); - const [response] = await client.getDataSource(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getDataSource as SinonStub) - 
.getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getDataSource without error using callback', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.DataSource()); - client.innerApiCalls.getDataSource = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getDataSource( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.datatransfer.v1.IDataSource|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getDataSource as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getDataSource with error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getDataSource = 
stubSimpleCall(undefined, expectedError); - await assert.rejects(async () => { await client.getDataSource(request); }, expectedError); - assert((client.innerApiCalls.getDataSource as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); + it('invokes getDataSource with error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getDataSource = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(async () => { + await client.getDataSource(request); + }, expectedError); + assert( + (client.innerApiCalls.getDataSource as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); }); - - describe('createTransferConfig', () => { - it('invokes createTransferConfig without error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig()); - client.innerApiCalls.createTransferConfig = 
stubSimpleCall(expectedResponse); - const [response] = await client.createTransferConfig(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createTransferConfig as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createTransferConfig without error using callback', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig()); - client.innerApiCalls.createTransferConfig = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.createTransferConfig( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createTransferConfig as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes createTransferConfig with error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest()); - request.parent = 
''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.createTransferConfig = stubSimpleCall(undefined, expectedError); - await assert.rejects(async () => { await client.createTransferConfig(request); }, expectedError); - assert((client.innerApiCalls.createTransferConfig as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); + }); + + describe('createTransferConfig', () => { + it('invokes createTransferConfig without error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig() + ); + client.innerApiCalls.createTransferConfig = stubSimpleCall( + expectedResponse + ); + const [response] = await client.createTransferConfig(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.createTransferConfig as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); }); - describe('updateTransferConfig', () => { - it('invokes updateTransferConfig without error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = 
generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest()); - request.transferConfig = {}; - request.transferConfig.name = ''; - const expectedHeaderRequestParams = "transfer_config.name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig()); - client.innerApiCalls.updateTransferConfig = stubSimpleCall(expectedResponse); - const [response] = await client.updateTransferConfig(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.updateTransferConfig as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes updateTransferConfig without error using callback', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest()); - request.transferConfig = {}; - request.transferConfig.name = ''; - const expectedHeaderRequestParams = "transfer_config.name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig()); - client.innerApiCalls.updateTransferConfig = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.updateTransferConfig( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - 
assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.updateTransferConfig as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes updateTransferConfig with error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest()); - request.transferConfig = {}; - request.transferConfig.name = ''; - const expectedHeaderRequestParams = "transfer_config.name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.updateTransferConfig = stubSimpleCall(undefined, expectedError); - await assert.rejects(async () => { await client.updateTransferConfig(request); }, expectedError); - assert((client.innerApiCalls.updateTransferConfig as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); + it('invokes createTransferConfig without error using callback', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig() + ); + client.innerApiCalls.createTransferConfig = 
stubSimpleCallWithCallback( + expectedResponse + ); + const promise = new Promise((resolve, reject) => { + client.createTransferConfig( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.createTransferConfig as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); }); - describe('deleteTransferConfig', () => { - it('invokes deleteTransferConfig without error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteTransferConfig = stubSimpleCall(expectedResponse); - const [response] = await client.deleteTransferConfig(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteTransferConfig as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteTransferConfig without error using callback', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new 
protos.google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteTransferConfig = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.deleteTransferConfig( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteTransferConfig as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes deleteTransferConfig with error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.deleteTransferConfig = stubSimpleCall(undefined, expectedError); - await assert.rejects(async () => { await client.deleteTransferConfig(request); }, expectedError); - assert((client.innerApiCalls.deleteTransferConfig as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); + it('invokes createTransferConfig with error', async () => { + const client = new 
datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.createTransferConfig = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(async () => { + await client.createTransferConfig(request); + }, expectedError); + assert( + (client.innerApiCalls.createTransferConfig as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + }); + + describe('updateTransferConfig', () => { + it('invokes updateTransferConfig without error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest() + ); + request.transferConfig = {}; + request.transferConfig.name = ''; + const expectedHeaderRequestParams = 'transfer_config.name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig() + ); + client.innerApiCalls.updateTransferConfig = stubSimpleCall( + expectedResponse + ); + const [response] = await client.updateTransferConfig(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.updateTransferConfig as SinonStub) + 
.getCall(0) + .calledWith(request, expectedOptions, undefined) + ); }); - describe('getTransferConfig', () => { - it('invokes getTransferConfig without error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig()); - client.innerApiCalls.getTransferConfig = stubSimpleCall(expectedResponse); - const [response] = await client.getTransferConfig(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getTransferConfig as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getTransferConfig without error using callback', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig()); - client.innerApiCalls.getTransferConfig = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getTransferConfig( - 
request, - (err?: Error|null, result?: protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getTransferConfig as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); + it('invokes updateTransferConfig without error using callback', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest() + ); + request.transferConfig = {}; + request.transferConfig.name = ''; + const expectedHeaderRequestParams = 'transfer_config.name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig() + ); + client.innerApiCalls.updateTransferConfig = stubSimpleCallWithCallback( + expectedResponse + ); + const promise = new Promise((resolve, reject) => { + client.updateTransferConfig( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.updateTransferConfig as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); + }); - it('invokes getTransferConfig with error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: 
{client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getTransferConfig = stubSimpleCall(undefined, expectedError); - await assert.rejects(async () => { await client.getTransferConfig(request); }, expectedError); - assert((client.innerApiCalls.getTransferConfig as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); + it('invokes updateTransferConfig with error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest() + ); + request.transferConfig = {}; + request.transferConfig.name = ''; + const expectedHeaderRequestParams = 'transfer_config.name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.updateTransferConfig = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(async () => { + await client.updateTransferConfig(request); + }, expectedError); + assert( + (client.innerApiCalls.updateTransferConfig as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + }); + + describe('deleteTransferConfig', () => { + it('invokes deleteTransferConfig without error', async () => { + const client = new 
datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.protobuf.Empty() + ); + client.innerApiCalls.deleteTransferConfig = stubSimpleCall( + expectedResponse + ); + const [response] = await client.deleteTransferConfig(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.deleteTransferConfig as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); }); - describe('scheduleTransferRuns', () => { - it('invokes scheduleTransferRuns without error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse()); - client.innerApiCalls.scheduleTransferRuns = stubSimpleCall(expectedResponse); - const [response] = await client.scheduleTransferRuns(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.scheduleTransferRuns as SinonStub) - .getCall(0).calledWith(request, 
expectedOptions, undefined)); - }); + it('invokes deleteTransferConfig without error using callback', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.protobuf.Empty() + ); + client.innerApiCalls.deleteTransferConfig = stubSimpleCallWithCallback( + expectedResponse + ); + const promise = new Promise((resolve, reject) => { + client.deleteTransferConfig( + request, + ( + err?: Error | null, + result?: protos.google.protobuf.IEmpty | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.deleteTransferConfig as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); + }); - it('invokes scheduleTransferRuns without error using callback', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new 
protos.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse()); - client.innerApiCalls.scheduleTransferRuns = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.scheduleTransferRuns( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsResponse|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.scheduleTransferRuns as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); + it('invokes deleteTransferConfig with error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.deleteTransferConfig = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(async () => { + await client.deleteTransferConfig(request); + }, expectedError); + assert( + (client.innerApiCalls.deleteTransferConfig as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + }); + + describe('getTransferConfig', () => { + it('invokes getTransferConfig without error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = 
generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig() + ); + client.innerApiCalls.getTransferConfig = stubSimpleCall(expectedResponse); + const [response] = await client.getTransferConfig(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.getTransferConfig as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); - it('invokes scheduleTransferRuns with error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.scheduleTransferRuns = stubSimpleCall(undefined, expectedError); - await assert.rejects(async () => { await client.scheduleTransferRuns(request); }, expectedError); - assert((client.innerApiCalls.scheduleTransferRuns as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); + it('invokes getTransferConfig without error using callback', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = 
generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig() + ); + client.innerApiCalls.getTransferConfig = stubSimpleCallWithCallback( + expectedResponse + ); + const promise = new Promise((resolve, reject) => { + client.getTransferConfig( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.getTransferConfig as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); }); - describe('startManualTransferRuns', () => { - it('invokes startManualTransferRuns without error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse()); - client.innerApiCalls.startManualTransferRuns = stubSimpleCall(expectedResponse); - const [response] = await client.startManualTransferRuns(request); - assert.deepStrictEqual(response, 
expectedResponse); - assert((client.innerApiCalls.startManualTransferRuns as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); + it('invokes getTransferConfig with error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getTransferConfig = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(async () => { + await client.getTransferConfig(request); + }, expectedError); + assert( + (client.innerApiCalls.getTransferConfig as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + }); + + describe('scheduleTransferRuns', () => { + it('invokes scheduleTransferRuns without error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse() + ); + client.innerApiCalls.scheduleTransferRuns = stubSimpleCall( + expectedResponse + ); + const [response] = 
await client.scheduleTransferRuns(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.scheduleTransferRuns as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); - it('invokes startManualTransferRuns without error using callback', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse()); - client.innerApiCalls.startManualTransferRuns = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.startManualTransferRuns( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsResponse|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startManualTransferRuns as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); + it('invokes scheduleTransferRuns without error using callback', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new 
protos.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse() + ); + client.innerApiCalls.scheduleTransferRuns = stubSimpleCallWithCallback( + expectedResponse + ); + const promise = new Promise((resolve, reject) => { + client.scheduleTransferRuns( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.datatransfer.v1.IScheduleTransferRunsResponse | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.scheduleTransferRuns as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); + }); - it('invokes startManualTransferRuns with error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.startManualTransferRuns = stubSimpleCall(undefined, expectedError); - await assert.rejects(async () => { await client.startManualTransferRuns(request); }, expectedError); - assert((client.innerApiCalls.startManualTransferRuns as SinonStub) - .getCall(0).calledWith(request, 
expectedOptions, undefined)); - }); + it('invokes scheduleTransferRuns with error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.scheduleTransferRuns = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(async () => { + await client.scheduleTransferRuns(request); + }, expectedError); + assert( + (client.innerApiCalls.scheduleTransferRuns as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + }); + + describe('startManualTransferRuns', () => { + it('invokes startManualTransferRuns without error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse() + ); + client.innerApiCalls.startManualTransferRuns = stubSimpleCall( + expectedResponse + ); + const [response] = await client.startManualTransferRuns(request); + assert.deepStrictEqual(response, 
expectedResponse); + assert( + (client.innerApiCalls.startManualTransferRuns as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); }); - describe('getTransferRun', () => { - it('invokes getTransferRun without error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferRun()); - client.innerApiCalls.getTransferRun = stubSimpleCall(expectedResponse); - const [response] = await client.getTransferRun(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getTransferRun as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); + it('invokes startManualTransferRuns without error using callback', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse() + ); + client.innerApiCalls.startManualTransferRuns = 
stubSimpleCallWithCallback( + expectedResponse + ); + const promise = new Promise((resolve, reject) => { + client.startManualTransferRuns( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.datatransfer.v1.IStartManualTransferRunsResponse | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.startManualTransferRuns as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); + }); - it('invokes getTransferRun without error using callback', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferRun()); - client.innerApiCalls.getTransferRun = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getTransferRun( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.datatransfer.v1.ITransferRun|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getTransferRun as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); + it('invokes startManualTransferRuns with error', async () => { + const client = new 
datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.startManualTransferRuns = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(async () => { + await client.startManualTransferRuns(request); + }, expectedError); + assert( + (client.innerApiCalls.startManualTransferRuns as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + }); + + describe('getTransferRun', () => { + it('invokes getTransferRun without error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferRun() + ); + client.innerApiCalls.getTransferRun = stubSimpleCall(expectedResponse); + const [response] = await client.getTransferRun(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.getTransferRun as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); - it('invokes getTransferRun 
with error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getTransferRun = stubSimpleCall(undefined, expectedError); - await assert.rejects(async () => { await client.getTransferRun(request); }, expectedError); - assert((client.innerApiCalls.getTransferRun as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); + it('invokes getTransferRun without error using callback', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferRun() + ); + client.innerApiCalls.getTransferRun = stubSimpleCallWithCallback( + expectedResponse + ); + const promise = new Promise((resolve, reject) => { + client.getTransferRun( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.datatransfer.v1.ITransferRun | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + 
assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.getTransferRun as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); }); - describe('deleteTransferRun', () => { - it('invokes deleteTransferRun without error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteTransferRun = stubSimpleCall(expectedResponse); - const [response] = await client.deleteTransferRun(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteTransferRun as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); + it('invokes getTransferRun with error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getTransferRun = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(async () => { + await 
client.getTransferRun(request); + }, expectedError); + assert( + (client.innerApiCalls.getTransferRun as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + }); + + describe('deleteTransferRun', () => { + it('invokes deleteTransferRun without error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.protobuf.Empty() + ); + client.innerApiCalls.deleteTransferRun = stubSimpleCall(expectedResponse); + const [response] = await client.deleteTransferRun(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.deleteTransferRun as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); - it('invokes deleteTransferRun without error using callback', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteTransferRun = 
stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.deleteTransferRun( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteTransferRun as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); + it('invokes deleteTransferRun without error using callback', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.protobuf.Empty() + ); + client.innerApiCalls.deleteTransferRun = stubSimpleCallWithCallback( + expectedResponse + ); + const promise = new Promise((resolve, reject) => { + client.deleteTransferRun( + request, + ( + err?: Error | null, + result?: protos.google.protobuf.IEmpty | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.deleteTransferRun as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); + }); - it('invokes deleteTransferRun with error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 
'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.deleteTransferRun = stubSimpleCall(undefined, expectedError); - await assert.rejects(async () => { await client.deleteTransferRun(request); }, expectedError); - assert((client.innerApiCalls.deleteTransferRun as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); + it('invokes deleteTransferRun with error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.deleteTransferRun = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(async () => { + await client.deleteTransferRun(request); + }, expectedError); + assert( + (client.innerApiCalls.deleteTransferRun as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + }); + + describe('checkValidCreds', () => { + it('invokes checkValidCreds without error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + 
const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse() + ); + client.innerApiCalls.checkValidCreds = stubSimpleCall(expectedResponse); + const [response] = await client.checkValidCreds(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.checkValidCreds as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); }); - describe('checkValidCreds', () => { - it('invokes checkValidCreds without error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse()); - client.innerApiCalls.checkValidCreds = stubSimpleCall(expectedResponse); - const [response] = await client.checkValidCreds(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.checkValidCreds as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); + it('invokes checkValidCreds without error using callback', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: 
{client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse() + ); + client.innerApiCalls.checkValidCreds = stubSimpleCallWithCallback( + expectedResponse + ); + const promise = new Promise((resolve, reject) => { + client.checkValidCreds( + request, + ( + err?: Error | null, + result?: protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsResponse | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.checkValidCreds as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); + }); - it('invokes checkValidCreds without error using callback', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse()); - client.innerApiCalls.checkValidCreds = stubSimpleCallWithCallback(expectedResponse); - const promise = new 
Promise((resolve, reject) => { - client.checkValidCreds( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.datatransfer.v1.ICheckValidCredsResponse|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.checkValidCreds as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); + it('invokes checkValidCreds with error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest() + ); + request.name = ''; + const expectedHeaderRequestParams = 'name='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.checkValidCreds = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(async () => { + await client.checkValidCreds(request); + }, expectedError); + assert( + (client.innerApiCalls.checkValidCreds as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); + }); + + describe('listDataSources', () => { + it('invokes listDataSources without error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 
'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.DataSource() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.DataSource() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.DataSource() + ), + ]; + client.innerApiCalls.listDataSources = stubSimpleCall(expectedResponse); + const [response] = await client.listDataSources(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.listDataSources as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); - it('invokes checkValidCreds with error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.checkValidCreds = stubSimpleCall(undefined, expectedError); - await assert.rejects(async () => { await client.checkValidCreds(request); }, expectedError); - assert((client.innerApiCalls.checkValidCreds as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); + it('invokes listDataSources without error using callback', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new 
protos.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.DataSource() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.DataSource() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.DataSource() + ), + ]; + client.innerApiCalls.listDataSources = stubSimpleCallWithCallback( + expectedResponse + ); + const promise = new Promise((resolve, reject) => { + client.listDataSources( + request, + ( + err?: Error | null, + result?: + | protos.google.cloud.bigquery.datatransfer.v1.IDataSource[] + | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.listDataSources as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); }); - describe('listDataSources', () => { - it('invokes listDataSources without error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.DataSource()), - generateSampleMessage(new 
protos.google.cloud.bigquery.datatransfer.v1.DataSource()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.DataSource()), - ]; - client.innerApiCalls.listDataSources = stubSimpleCall(expectedResponse); - const [response] = await client.listDataSources(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listDataSources as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); + it('invokes listDataSources with error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listDataSources = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(async () => { + await client.listDataSources(request); + }, expectedError); + assert( + (client.innerApiCalls.listDataSources as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); - it('invokes listDataSources without error using callback', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - 
}, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.DataSource()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.DataSource()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.DataSource()), - ]; - client.innerApiCalls.listDataSources = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listDataSources( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.datatransfer.v1.IDataSource[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listDataSources as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); + it('invokes listDataSourcesStream without error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedResponse = [ + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.DataSource() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.DataSource() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.DataSource() + ), + ]; + client.descriptors.page.listDataSources.createStream = stubPageStreamingCall( + expectedResponse + ); + const stream = client.listDataSourcesStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.datatransfer.v1.DataSource[] = []; + stream.on( + 'data', + ( + 
response: protos.google.cloud.bigquery.datatransfer.v1.DataSource + ) => { + responses.push(response); + } + ); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + assert( + (client.descriptors.page.listDataSources.createStream as SinonStub) + .getCall(0) + .calledWith(client.innerApiCalls.listDataSources, request) + ); + assert.strictEqual( + (client.descriptors.page.listDataSources + .createStream as SinonStub).getCall(0).args[2].otherArgs.headers[ + 'x-goog-request-params' + ], + expectedHeaderRequestParams + ); + }); - it('invokes listDataSources with error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listDataSources = stubSimpleCall(undefined, expectedError); - await assert.rejects(async () => { await client.listDataSources(request); }, expectedError); - assert((client.innerApiCalls.listDataSources as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); + it('invokes listDataSourcesStream with error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest() + ); + 
request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedError = new Error('expected'); + client.descriptors.page.listDataSources.createStream = stubPageStreamingCall( + undefined, + expectedError + ); + const stream = client.listDataSourcesStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.datatransfer.v1.DataSource[] = []; + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.datatransfer.v1.DataSource + ) => { + responses.push(response); + } + ); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(async () => { + await promise; + }, expectedError); + assert( + (client.descriptors.page.listDataSources.createStream as SinonStub) + .getCall(0) + .calledWith(client.innerApiCalls.listDataSources, request) + ); + assert.strictEqual( + (client.descriptors.page.listDataSources + .createStream as SinonStub).getCall(0).args[2].otherArgs.headers[ + 'x-goog-request-params' + ], + expectedHeaderRequestParams + ); + }); - it('invokes listDataSourcesStream without error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.DataSource()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.DataSource()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.DataSource()), - ]; - client.descriptors.page.listDataSources.createStream = stubPageStreamingCall(expectedResponse); - const stream = 
client.listDataSourcesStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.datatransfer.v1.DataSource[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.datatransfer.v1.DataSource) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - assert((client.descriptors.page.listDataSources.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listDataSources, request)); - assert.strictEqual( - (client.descriptors.page.listDataSources.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); + it('uses async iteration with listDataSources without error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedResponse = [ + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.DataSource() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.DataSource() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.DataSource() + ), + ]; + client.descriptors.page.listDataSources.asyncIterate = stubAsyncIterationCall( + expectedResponse + ); + const responses: protos.google.cloud.bigquery.datatransfer.v1.IDataSource[] = []; + const iterable = client.listDataSourcesAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + 
assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listDataSources + .asyncIterate as SinonStub).getCall(0).args[1], + request + ); + assert.strictEqual( + (client.descriptors.page.listDataSources + .asyncIterate as SinonStub).getCall(0).args[2].otherArgs.headers[ + 'x-goog-request-params' + ], + expectedHeaderRequestParams + ); + }); - it('invokes listDataSourcesStream with error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listDataSources.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listDataSourcesStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.datatransfer.v1.DataSource[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.datatransfer.v1.DataSource) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(async () => { await promise; }, expectedError); - assert((client.descriptors.page.listDataSources.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listDataSources, request)); - assert.strictEqual( - (client.descriptors.page.listDataSources.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); + it('uses async iteration with listDataSources with error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( 
+ { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedError = new Error('expected'); + client.descriptors.page.listDataSources.asyncIterate = stubAsyncIterationCall( + undefined, + expectedError + ); + const iterable = client.listDataSourcesAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.datatransfer.v1.IDataSource[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listDataSources + .asyncIterate as SinonStub).getCall(0).args[1], + request + ); + assert.strictEqual( + (client.descriptors.page.listDataSources + .asyncIterate as SinonStub).getCall(0).args[2].otherArgs.headers[ + 'x-goog-request-params' + ], + expectedHeaderRequestParams + ); + }); + }); + + describe('listTransferConfigs', () => { + it('invokes listTransferConfigs without error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig() + ), + generateSampleMessage( + new 
protos.google.cloud.bigquery.datatransfer.v1.TransferConfig() + ), + ]; + client.innerApiCalls.listTransferConfigs = stubSimpleCall( + expectedResponse + ); + const [response] = await client.listTransferConfigs(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.listTransferConfigs as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); - it('uses async iteration with listDataSources without error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.DataSource()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.DataSource()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.DataSource()), - ]; - client.descriptors.page.listDataSources.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.datatransfer.v1.IDataSource[] = []; - const iterable = client.listDataSourcesAsync(request); - for await (const resource of iterable) { - responses.push(resource!); + it('invokes listTransferConfigs without error using callback', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { 
+ headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig() + ), + ]; + client.innerApiCalls.listTransferConfigs = stubSimpleCallWithCallback( + expectedResponse + ); + const promise = new Promise((resolve, reject) => { + client.listTransferConfigs( + request, + ( + err?: Error | null, + result?: + | protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig[] + | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listDataSources.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listDataSources.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listDataSources with error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listDataSources.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listDataSourcesAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.datatransfer.v1.IDataSource[] = []; - for await (const resource of iterable) { - 
responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listDataSources.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listDataSources.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.listTransferConfigs as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); }); - describe('listTransferConfigs', () => { - it('invokes listTransferConfigs without error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig()), - ]; - client.innerApiCalls.listTransferConfigs = stubSimpleCall(expectedResponse); - const [response] = await client.listTransferConfigs(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listTransferConfigs as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); + it('invokes listTransferConfigs with error', async () => { + const client = new 
datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listTransferConfigs = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(async () => { + await client.listTransferConfigs(request); + }, expectedError); + assert( + (client.innerApiCalls.listTransferConfigs as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); - it('invokes listTransferConfigs without error using callback', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig()), - ]; - client.innerApiCalls.listTransferConfigs = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listTransferConfigs( - request, - (err?: Error|null, 
result?: protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listTransferConfigs as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); + it('invokes listTransferConfigsStream without error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedResponse = [ + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig() + ), + ]; + client.descriptors.page.listTransferConfigs.createStream = stubPageStreamingCall( + expectedResponse + ); + const stream = client.listTransferConfigsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.datatransfer.v1.TransferConfig[] = []; + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.datatransfer.v1.TransferConfig + ) => { + responses.push(response); + } + ); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + assert( + (client.descriptors.page.listTransferConfigs.createStream as SinonStub) + .getCall(0) + 
.calledWith(client.innerApiCalls.listTransferConfigs, request) + ); + assert.strictEqual( + (client.descriptors.page.listTransferConfigs + .createStream as SinonStub).getCall(0).args[2].otherArgs.headers[ + 'x-goog-request-params' + ], + expectedHeaderRequestParams + ); + }); - it('invokes listTransferConfigs with error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listTransferConfigs = stubSimpleCall(undefined, expectedError); - await assert.rejects(async () => { await client.listTransferConfigs(request); }, expectedError); - assert((client.innerApiCalls.listTransferConfigs as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); + it('invokes listTransferConfigsStream with error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedError = new Error('expected'); + client.descriptors.page.listTransferConfigs.createStream = stubPageStreamingCall( + undefined, + expectedError + ); + const stream = client.listTransferConfigsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: 
protos.google.cloud.bigquery.datatransfer.v1.TransferConfig[] = []; + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.datatransfer.v1.TransferConfig + ) => { + responses.push(response); + } + ); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(async () => { + await promise; + }, expectedError); + assert( + (client.descriptors.page.listTransferConfigs.createStream as SinonStub) + .getCall(0) + .calledWith(client.innerApiCalls.listTransferConfigs, request) + ); + assert.strictEqual( + (client.descriptors.page.listTransferConfigs + .createStream as SinonStub).getCall(0).args[2].otherArgs.headers[ + 'x-goog-request-params' + ], + expectedHeaderRequestParams + ); + }); - it('invokes listTransferConfigsStream without error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig()), - ]; - client.descriptors.page.listTransferConfigs.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listTransferConfigsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.datatransfer.v1.TransferConfig[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.datatransfer.v1.TransferConfig) => { - responses.push(response); - }); - 
stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - assert((client.descriptors.page.listTransferConfigs.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listTransferConfigs, request)); - assert.strictEqual( - (client.descriptors.page.listTransferConfigs.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); + it('uses async iteration with listTransferConfigs without error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedResponse = [ + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig() + ), + ]; + client.descriptors.page.listTransferConfigs.asyncIterate = stubAsyncIterationCall( + expectedResponse + ); + const responses: protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig[] = []; + const iterable = client.listTransferConfigsAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listTransferConfigs + .asyncIterate as SinonStub).getCall(0).args[1], + request + ); + assert.strictEqual( + (client.descriptors.page.listTransferConfigs + .asyncIterate as 
SinonStub).getCall(0).args[2].otherArgs.headers[ + 'x-goog-request-params' + ], + expectedHeaderRequestParams + ); + }); - it('invokes listTransferConfigsStream with error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listTransferConfigs.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listTransferConfigsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.datatransfer.v1.TransferConfig[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.datatransfer.v1.TransferConfig) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(async () => { await promise; }, expectedError); - assert((client.descriptors.page.listTransferConfigs.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listTransferConfigs, request)); - assert.strictEqual( - (client.descriptors.page.listTransferConfigs.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); + it('uses async iteration with listTransferConfigs with error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new 
protos.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedError = new Error('expected'); + client.descriptors.page.listTransferConfigs.asyncIterate = stubAsyncIterationCall( + undefined, + expectedError + ); + const iterable = client.listTransferConfigsAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listTransferConfigs + .asyncIterate as SinonStub).getCall(0).args[1], + request + ); + assert.strictEqual( + (client.descriptors.page.listTransferConfigs + .asyncIterate as SinonStub).getCall(0).args[2].otherArgs.headers[ + 'x-goog-request-params' + ], + expectedHeaderRequestParams + ); + }); + }); + + describe('listTransferRuns', () => { + it('invokes listTransferRuns without error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferRun() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferRun() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferRun() + ), + ]; + client.innerApiCalls.listTransferRuns = stubSimpleCall(expectedResponse); + const [response] = await 
client.listTransferRuns(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.listTransferRuns as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); - it('uses async iteration with listTransferConfigs without error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferConfig()), - ]; - client.descriptors.page.listTransferConfigs.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig[] = []; - const iterable = client.listTransferConfigsAsync(request); - for await (const resource of iterable) { - responses.push(resource!); + it('invokes listTransferRuns without error using callback', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage( + new 
protos.google.cloud.bigquery.datatransfer.v1.TransferRun() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferRun() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferRun() + ), + ]; + client.innerApiCalls.listTransferRuns = stubSimpleCallWithCallback( + expectedResponse + ); + const promise = new Promise((resolve, reject) => { + client.listTransferRuns( + request, + ( + err?: Error | null, + result?: + | protos.google.cloud.bigquery.datatransfer.v1.ITransferRun[] + | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listTransferConfigs.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listTransferConfigs.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listTransferConfigs with error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listTransferConfigs.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listTransferConfigsAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.datatransfer.v1.ITransferConfig[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listTransferConfigs.asyncIterate as 
SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listTransferConfigs.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.listTransferRuns as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); }); - describe('listTransferRuns', () => { - it('invokes listTransferRuns without error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferRun()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferRun()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferRun()), - ]; - client.innerApiCalls.listTransferRuns = stubSimpleCall(expectedResponse); - const [response] = await client.listTransferRuns(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listTransferRuns as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); + it('invokes listTransferRuns with error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + 
const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listTransferRuns = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(async () => { + await client.listTransferRuns(request); + }, expectedError); + assert( + (client.innerApiCalls.listTransferRuns as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); - it('invokes listTransferRuns without error using callback', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferRun()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferRun()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferRun()), - ]; - client.innerApiCalls.listTransferRuns = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listTransferRuns( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.datatransfer.v1.ITransferRun[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - 
assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listTransferRuns as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); + it('invokes listTransferRunsStream without error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedResponse = [ + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferRun() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferRun() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferRun() + ), + ]; + client.descriptors.page.listTransferRuns.createStream = stubPageStreamingCall( + expectedResponse + ); + const stream = client.listTransferRunsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.datatransfer.v1.TransferRun[] = []; + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.datatransfer.v1.TransferRun + ) => { + responses.push(response); + } + ); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + assert( + (client.descriptors.page.listTransferRuns.createStream as SinonStub) + .getCall(0) + .calledWith(client.innerApiCalls.listTransferRuns, request) + ); + assert.strictEqual( + (client.descriptors.page.listTransferRuns + .createStream as SinonStub).getCall(0).args[2].otherArgs.headers[ + 'x-goog-request-params' + ], + expectedHeaderRequestParams + ); + 
}); - it('invokes listTransferRuns with error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listTransferRuns = stubSimpleCall(undefined, expectedError); - await assert.rejects(async () => { await client.listTransferRuns(request); }, expectedError); - assert((client.innerApiCalls.listTransferRuns as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); + it('invokes listTransferRunsStream with error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedError = new Error('expected'); + client.descriptors.page.listTransferRuns.createStream = stubPageStreamingCall( + undefined, + expectedError + ); + const stream = client.listTransferRunsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.datatransfer.v1.TransferRun[] = []; + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.datatransfer.v1.TransferRun + ) => { + responses.push(response); + } + ); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + 
}); + await assert.rejects(async () => { + await promise; + }, expectedError); + assert( + (client.descriptors.page.listTransferRuns.createStream as SinonStub) + .getCall(0) + .calledWith(client.innerApiCalls.listTransferRuns, request) + ); + assert.strictEqual( + (client.descriptors.page.listTransferRuns + .createStream as SinonStub).getCall(0).args[2].otherArgs.headers[ + 'x-goog-request-params' + ], + expectedHeaderRequestParams + ); + }); - it('invokes listTransferRunsStream without error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferRun()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferRun()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferRun()), - ]; - client.descriptors.page.listTransferRuns.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listTransferRunsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.datatransfer.v1.TransferRun[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.datatransfer.v1.TransferRun) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - assert((client.descriptors.page.listTransferRuns.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listTransferRuns, request)); - 
assert.strictEqual( - (client.descriptors.page.listTransferRuns.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); + it('uses async iteration with listTransferRuns without error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedResponse = [ + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferRun() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferRun() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferRun() + ), + ]; + client.descriptors.page.listTransferRuns.asyncIterate = stubAsyncIterationCall( + expectedResponse + ); + const responses: protos.google.cloud.bigquery.datatransfer.v1.ITransferRun[] = []; + const iterable = client.listTransferRunsAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listTransferRuns + .asyncIterate as SinonStub).getCall(0).args[1], + request + ); + assert.strictEqual( + (client.descriptors.page.listTransferRuns + .asyncIterate as SinonStub).getCall(0).args[2].otherArgs.headers[ + 'x-goog-request-params' + ], + expectedHeaderRequestParams + ); + }); - it('invokes listTransferRunsStream with error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = 
generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listTransferRuns.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listTransferRunsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.datatransfer.v1.TransferRun[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.datatransfer.v1.TransferRun) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(async () => { await promise; }, expectedError); - assert((client.descriptors.page.listTransferRuns.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listTransferRuns, request)); - assert.strictEqual( - (client.descriptors.page.listTransferRuns.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); + it('uses async iteration with listTransferRuns with error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedError = new Error('expected'); + client.descriptors.page.listTransferRuns.asyncIterate = stubAsyncIterationCall( + undefined, + expectedError + ); + const iterable = client.listTransferRunsAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.datatransfer.v1.ITransferRun[] = 
[]; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listTransferRuns + .asyncIterate as SinonStub).getCall(0).args[1], + request + ); + assert.strictEqual( + (client.descriptors.page.listTransferRuns + .asyncIterate as SinonStub).getCall(0).args[2].otherArgs.headers[ + 'x-goog-request-params' + ], + expectedHeaderRequestParams + ); + }); + }); + + describe('listTransferLogs', () => { + it('invokes listTransferLogs without error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferMessage() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferMessage() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferMessage() + ), + ]; + client.innerApiCalls.listTransferLogs = stubSimpleCall(expectedResponse); + const [response] = await client.listTransferLogs(request); + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.listTransferLogs as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); - it('uses async iteration with listTransferRuns without error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const 
request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferRun()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferRun()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferRun()), - ]; - client.descriptors.page.listTransferRuns.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.datatransfer.v1.ITransferRun[] = []; - const iterable = client.listTransferRunsAsync(request); - for await (const resource of iterable) { - responses.push(resource!); + it('invokes listTransferLogs without error using callback', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferMessage() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferMessage() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferMessage() + ), + ]; + client.innerApiCalls.listTransferLogs = stubSimpleCallWithCallback( + expectedResponse + ); + const promise = new Promise((resolve, reject) => { + client.listTransferLogs( + request, + ( + err?: Error | null, + result?: + | 
protos.google.cloud.bigquery.datatransfer.v1.ITransferMessage[] + | null + ) => { + if (err) { + reject(err); + } else { + resolve(result); } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listTransferRuns.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listTransferRuns.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listTransferRuns with error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listTransferRuns.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listTransferRunsAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.datatransfer.v1.ITransferRun[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listTransferRuns.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listTransferRuns.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); + } + ); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert( + (client.innerApiCalls.listTransferLogs as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions /*, callback defined above */) + ); }); - 
describe('listTransferLogs', () => { - it('invokes listTransferLogs without error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferMessage()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferMessage()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferMessage()), - ]; - client.innerApiCalls.listTransferLogs = stubSimpleCall(expectedResponse); - const [response] = await client.listTransferLogs(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listTransferLogs as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listTransferLogs without error using callback', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferMessage()), - generateSampleMessage(new 
protos.google.cloud.bigquery.datatransfer.v1.TransferMessage()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferMessage()), - ]; - client.innerApiCalls.listTransferLogs = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listTransferLogs( - request, - (err?: Error|null, result?: protos.google.cloud.bigquery.datatransfer.v1.ITransferMessage[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listTransferLogs as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listTransferLogs with error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listTransferLogs = stubSimpleCall(undefined, expectedError); - await assert.rejects(async () => { await client.listTransferLogs(request); }, expectedError); - assert((client.innerApiCalls.listTransferLogs as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listTransferLogsStream without error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new 
protos.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferMessage()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferMessage()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferMessage()), - ]; - client.descriptors.page.listTransferLogs.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listTransferLogsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.datatransfer.v1.TransferMessage[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.datatransfer.v1.TransferMessage) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - assert((client.descriptors.page.listTransferLogs.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listTransferLogs, request)); - assert.strictEqual( - (client.descriptors.page.listTransferLogs.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); + it('invokes listTransferLogs with error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': 
expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listTransferLogs = stubSimpleCall( + undefined, + expectedError + ); + await assert.rejects(async () => { + await client.listTransferLogs(request); + }, expectedError); + assert( + (client.innerApiCalls.listTransferLogs as SinonStub) + .getCall(0) + .calledWith(request, expectedOptions, undefined) + ); + }); - it('invokes listTransferLogsStream with error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listTransferLogs.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listTransferLogsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.bigquery.datatransfer.v1.TransferMessage[] = []; - stream.on('data', (response: protos.google.cloud.bigquery.datatransfer.v1.TransferMessage) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(async () => { await promise; }, expectedError); - assert((client.descriptors.page.listTransferLogs.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listTransferLogs, request)); - assert.strictEqual( - (client.descriptors.page.listTransferLogs.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); + it('invokes listTransferLogsStream without error', async () => { + const client = new 
datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedResponse = [ + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferMessage() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferMessage() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferMessage() + ), + ]; + client.descriptors.page.listTransferLogs.createStream = stubPageStreamingCall( + expectedResponse + ); + const stream = client.listTransferLogsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.bigquery.datatransfer.v1.TransferMessage[] = []; + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.datatransfer.v1.TransferMessage + ) => { + responses.push(response); + } + ); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + assert( + (client.descriptors.page.listTransferLogs.createStream as SinonStub) + .getCall(0) + .calledWith(client.innerApiCalls.listTransferLogs, request) + ); + assert.strictEqual( + (client.descriptors.page.listTransferLogs + .createStream as SinonStub).getCall(0).args[2].otherArgs.headers[ + 'x-goog-request-params' + ], + expectedHeaderRequestParams + ); + }); - it('uses async iteration with listTransferLogs without error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); 
- const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferMessage()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferMessage()), - generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.TransferMessage()), - ]; - client.descriptors.page.listTransferLogs.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.bigquery.datatransfer.v1.ITransferMessage[] = []; - const iterable = client.listTransferLogsAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listTransferLogs.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listTransferLogs.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); + it('invokes listTransferLogsStream with error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedError = new Error('expected'); + client.descriptors.page.listTransferLogs.createStream = stubPageStreamingCall( + undefined, + expectedError + ); + const stream = client.listTransferLogsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: 
protos.google.cloud.bigquery.datatransfer.v1.TransferMessage[] = []; + stream.on( + 'data', + ( + response: protos.google.cloud.bigquery.datatransfer.v1.TransferMessage + ) => { + responses.push(response); + } + ); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(async () => { + await promise; + }, expectedError); + assert( + (client.descriptors.page.listTransferLogs.createStream as SinonStub) + .getCall(0) + .calledWith(client.innerApiCalls.listTransferLogs, request) + ); + assert.strictEqual( + (client.descriptors.page.listTransferLogs + .createStream as SinonStub).getCall(0).args[2].otherArgs.headers[ + 'x-goog-request-params' + ], + expectedHeaderRequestParams + ); + }); - it('uses async iteration with listTransferLogs with error', async () => { - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listTransferLogs.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listTransferLogsAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.bigquery.datatransfer.v1.ITransferMessage[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listTransferLogs.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listTransferLogs.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); + it('uses async 
iteration with listTransferLogs without error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedResponse = [ + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferMessage() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferMessage() + ), + generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.TransferMessage() + ), + ]; + client.descriptors.page.listTransferLogs.asyncIterate = stubAsyncIterationCall( + expectedResponse + ); + const responses: protos.google.cloud.bigquery.datatransfer.v1.ITransferMessage[] = []; + const iterable = client.listTransferLogsAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listTransferLogs + .asyncIterate as SinonStub).getCall(0).args[1], + request + ); + assert.strictEqual( + (client.descriptors.page.listTransferLogs + .asyncIterate as SinonStub).getCall(0).args[2].otherArgs.headers[ + 'x-goog-request-params' + ], + expectedHeaderRequestParams + ); }); - describe('Path templates', () => { - - describe('project', () => { - const fakePath = "/rendered/path/project"; - const expectedParameters = { - project: "projectValue", - }; - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectPathTemplate.render = - sinon.stub().returns(fakePath); - 
client.pathTemplates.projectPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectPath', () => { - const result = client.projectPath("projectValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectName', () => { - const result = client.matchProjectFromProjectName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); + it('uses async iteration with listTransferLogs with error', async () => { + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + const request = generateSampleMessage( + new protos.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest() + ); + request.parent = ''; + const expectedHeaderRequestParams = 'parent='; + const expectedError = new Error('expected'); + client.descriptors.page.listTransferLogs.asyncIterate = stubAsyncIterationCall( + undefined, + expectedError + ); + const iterable = client.listTransferLogsAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.bigquery.datatransfer.v1.ITransferMessage[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listTransferLogs + .asyncIterate as SinonStub).getCall(0).args[1], + request + ); + assert.strictEqual( + (client.descriptors.page.listTransferLogs + .asyncIterate as SinonStub).getCall(0).args[2].otherArgs.headers[ + 'x-goog-request-params' + ], + expectedHeaderRequestParams + ); + }); + }); + + describe('Path templates', () => { + describe('project', () => { + const fakePath = '/rendered/path/project'; + const 
expectedParameters = { + project: 'projectValue', + }; + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + client.pathTemplates.projectPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.projectPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('projectPath', () => { + const result = client.projectPath('projectValue'); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.projectPathTemplate.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromProjectName', () => { + const result = client.matchProjectFromProjectName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.projectPathTemplate.match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); - describe('projectDataSource', () => { - const fakePath = "/rendered/path/projectDataSource"; - const expectedParameters = { - project: "projectValue", - data_source: "dataSourceValue", - }; - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectDataSourcePathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectDataSourcePathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectDataSourcePath', () => { - const result = client.projectDataSourcePath("projectValue", "dataSourceValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectDataSourcePathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectDataSourceName', () => { - const result = client.matchProjectFromProjectDataSourceName(fakePath); - 
assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectDataSourcePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchDataSourceFromProjectDataSourceName', () => { - const result = client.matchDataSourceFromProjectDataSourceName(fakePath); - assert.strictEqual(result, "dataSourceValue"); - assert((client.pathTemplates.projectDataSourcePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); + describe('projectDataSource', () => { + const fakePath = '/rendered/path/projectDataSource'; + const expectedParameters = { + project: 'projectValue', + data_source: 'dataSourceValue', + }; + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + client.pathTemplates.projectDataSourcePathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.projectDataSourcePathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('projectDataSourcePath', () => { + const result = client.projectDataSourcePath( + 'projectValue', + 'dataSourceValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.projectDataSourcePathTemplate + .render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromProjectDataSourceName', () => { + const result = client.matchProjectFromProjectDataSourceName(fakePath); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.projectDataSourcePathTemplate + .match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchDataSourceFromProjectDataSourceName', () => { + const result = client.matchDataSourceFromProjectDataSourceName( + fakePath + ); + assert.strictEqual(result, 'dataSourceValue'); + assert( + (client.pathTemplates.projectDataSourcePathTemplate + .match as SinonStub) + 
.getCall(-1) + .calledWith(fakePath) + ); + }); + }); - describe('projectLocationDataSource', () => { - const fakePath = "/rendered/path/projectLocationDataSource"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - data_source: "dataSourceValue", - }; - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectLocationDataSourcePathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectLocationDataSourcePathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectLocationDataSourcePath', () => { - const result = client.projectLocationDataSourcePath("projectValue", "locationValue", "dataSourceValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectLocationDataSourcePathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectLocationDataSourceName', () => { - const result = client.matchProjectFromProjectLocationDataSourceName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectLocationDataSourcePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromProjectLocationDataSourceName', () => { - const result = client.matchLocationFromProjectLocationDataSourceName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.projectLocationDataSourcePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchDataSourceFromProjectLocationDataSourceName', () => { - const result = client.matchDataSourceFromProjectLocationDataSourceName(fakePath); - assert.strictEqual(result, "dataSourceValue"); - assert((client.pathTemplates.projectLocationDataSourcePathTemplate.match as SinonStub) - 
.getCall(-1).calledWith(fakePath)); - }); - }); + describe('projectLocationDataSource', () => { + const fakePath = '/rendered/path/projectLocationDataSource'; + const expectedParameters = { + project: 'projectValue', + location: 'locationValue', + data_source: 'dataSourceValue', + }; + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + client.pathTemplates.projectLocationDataSourcePathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.projectLocationDataSourcePathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('projectLocationDataSourcePath', () => { + const result = client.projectLocationDataSourcePath( + 'projectValue', + 'locationValue', + 'dataSourceValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.projectLocationDataSourcePathTemplate + .render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromProjectLocationDataSourceName', () => { + const result = client.matchProjectFromProjectLocationDataSourceName( + fakePath + ); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.projectLocationDataSourcePathTemplate + .match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchLocationFromProjectLocationDataSourceName', () => { + const result = client.matchLocationFromProjectLocationDataSourceName( + fakePath + ); + assert.strictEqual(result, 'locationValue'); + assert( + (client.pathTemplates.projectLocationDataSourcePathTemplate + .match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchDataSourceFromProjectLocationDataSourceName', () => { + const result = client.matchDataSourceFromProjectLocationDataSourceName( + fakePath + ); + assert.strictEqual(result, 'dataSourceValue'); + assert( + 
(client.pathTemplates.projectLocationDataSourcePathTemplate + .match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); - describe('projectLocationTransferConfig', () => { - const fakePath = "/rendered/path/projectLocationTransferConfig"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - transfer_config: "transferConfigValue", - }; - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectLocationTransferConfigPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectLocationTransferConfigPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectLocationTransferConfigPath', () => { - const result = client.projectLocationTransferConfigPath("projectValue", "locationValue", "transferConfigValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectLocationTransferConfigPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectLocationTransferConfigName', () => { - const result = client.matchProjectFromProjectLocationTransferConfigName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectLocationTransferConfigPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromProjectLocationTransferConfigName', () => { - const result = client.matchLocationFromProjectLocationTransferConfigName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.projectLocationTransferConfigPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchTransferConfigFromProjectLocationTransferConfigName', () => { - const result = 
client.matchTransferConfigFromProjectLocationTransferConfigName(fakePath); - assert.strictEqual(result, "transferConfigValue"); - assert((client.pathTemplates.projectLocationTransferConfigPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); + describe('projectLocationTransferConfig', () => { + const fakePath = '/rendered/path/projectLocationTransferConfig'; + const expectedParameters = { + project: 'projectValue', + location: 'locationValue', + transfer_config: 'transferConfigValue', + }; + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + client.pathTemplates.projectLocationTransferConfigPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.projectLocationTransferConfigPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('projectLocationTransferConfigPath', () => { + const result = client.projectLocationTransferConfigPath( + 'projectValue', + 'locationValue', + 'transferConfigValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.projectLocationTransferConfigPathTemplate + .render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromProjectLocationTransferConfigName', () => { + const result = client.matchProjectFromProjectLocationTransferConfigName( + fakePath + ); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.projectLocationTransferConfigPathTemplate + .match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchLocationFromProjectLocationTransferConfigName', () => { + const result = client.matchLocationFromProjectLocationTransferConfigName( + fakePath + ); + assert.strictEqual(result, 'locationValue'); + assert( + (client.pathTemplates.projectLocationTransferConfigPathTemplate + .match as SinonStub) + 
.getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchTransferConfigFromProjectLocationTransferConfigName', () => { + const result = client.matchTransferConfigFromProjectLocationTransferConfigName( + fakePath + ); + assert.strictEqual(result, 'transferConfigValue'); + assert( + (client.pathTemplates.projectLocationTransferConfigPathTemplate + .match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); - describe('projectLocationTransferConfigRun', () => { - const fakePath = "/rendered/path/projectLocationTransferConfigRun"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - transfer_config: "transferConfigValue", - run: "runValue", - }; - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectLocationTransferConfigRunPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectLocationTransferConfigRunPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectLocationTransferConfigRunPath', () => { - const result = client.projectLocationTransferConfigRunPath("projectValue", "locationValue", "transferConfigValue", "runValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectLocationTransferConfigRunPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectLocationTransferConfigRunName', () => { - const result = client.matchProjectFromProjectLocationTransferConfigRunName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectLocationTransferConfigRunPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromProjectLocationTransferConfigRunName', () => { - const result = 
client.matchLocationFromProjectLocationTransferConfigRunName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.projectLocationTransferConfigRunPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchTransferConfigFromProjectLocationTransferConfigRunName', () => { - const result = client.matchTransferConfigFromProjectLocationTransferConfigRunName(fakePath); - assert.strictEqual(result, "transferConfigValue"); - assert((client.pathTemplates.projectLocationTransferConfigRunPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchRunFromProjectLocationTransferConfigRunName', () => { - const result = client.matchRunFromProjectLocationTransferConfigRunName(fakePath); - assert.strictEqual(result, "runValue"); - assert((client.pathTemplates.projectLocationTransferConfigRunPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); + describe('projectLocationTransferConfigRun', () => { + const fakePath = '/rendered/path/projectLocationTransferConfigRun'; + const expectedParameters = { + project: 'projectValue', + location: 'locationValue', + transfer_config: 'transferConfigValue', + run: 'runValue', + }; + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + client.pathTemplates.projectLocationTransferConfigRunPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.projectLocationTransferConfigRunPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('projectLocationTransferConfigRunPath', () => { + const result = client.projectLocationTransferConfigRunPath( + 'projectValue', + 'locationValue', + 'transferConfigValue', + 'runValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.projectLocationTransferConfigRunPathTemplate + 
.render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromProjectLocationTransferConfigRunName', () => { + const result = client.matchProjectFromProjectLocationTransferConfigRunName( + fakePath + ); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.projectLocationTransferConfigRunPathTemplate + .match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchLocationFromProjectLocationTransferConfigRunName', () => { + const result = client.matchLocationFromProjectLocationTransferConfigRunName( + fakePath + ); + assert.strictEqual(result, 'locationValue'); + assert( + (client.pathTemplates.projectLocationTransferConfigRunPathTemplate + .match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchTransferConfigFromProjectLocationTransferConfigRunName', () => { + const result = client.matchTransferConfigFromProjectLocationTransferConfigRunName( + fakePath + ); + assert.strictEqual(result, 'transferConfigValue'); + assert( + (client.pathTemplates.projectLocationTransferConfigRunPathTemplate + .match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchRunFromProjectLocationTransferConfigRunName', () => { + const result = client.matchRunFromProjectLocationTransferConfigRunName( + fakePath + ); + assert.strictEqual(result, 'runValue'); + assert( + (client.pathTemplates.projectLocationTransferConfigRunPathTemplate + .match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); - describe('projectTransferConfig', () => { - const fakePath = "/rendered/path/projectTransferConfig"; - const expectedParameters = { - project: "projectValue", - transfer_config: "transferConfigValue", - }; - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - 
client.pathTemplates.projectTransferConfigPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectTransferConfigPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectTransferConfigPath', () => { - const result = client.projectTransferConfigPath("projectValue", "transferConfigValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectTransferConfigPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectTransferConfigName', () => { - const result = client.matchProjectFromProjectTransferConfigName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectTransferConfigPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchTransferConfigFromProjectTransferConfigName', () => { - const result = client.matchTransferConfigFromProjectTransferConfigName(fakePath); - assert.strictEqual(result, "transferConfigValue"); - assert((client.pathTemplates.projectTransferConfigPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); + describe('projectTransferConfig', () => { + const fakePath = '/rendered/path/projectTransferConfig'; + const expectedParameters = { + project: 'projectValue', + transfer_config: 'transferConfigValue', + }; + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + client.pathTemplates.projectTransferConfigPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.projectTransferConfigPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('projectTransferConfigPath', () => { + const result = client.projectTransferConfigPath( + 'projectValue', + 'transferConfigValue' + ); + assert.strictEqual(result, fakePath); + assert( + 
(client.pathTemplates.projectTransferConfigPathTemplate + .render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromProjectTransferConfigName', () => { + const result = client.matchProjectFromProjectTransferConfigName( + fakePath + ); + assert.strictEqual(result, 'projectValue'); + assert( + (client.pathTemplates.projectTransferConfigPathTemplate + .match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchTransferConfigFromProjectTransferConfigName', () => { + const result = client.matchTransferConfigFromProjectTransferConfigName( + fakePath + ); + assert.strictEqual(result, 'transferConfigValue'); + assert( + (client.pathTemplates.projectTransferConfigPathTemplate + .match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + }); - describe('projectTransferConfigRun', () => { - const fakePath = "/rendered/path/projectTransferConfigRun"; - const expectedParameters = { - project: "projectValue", - transfer_config: "transferConfigValue", - run: "runValue", - }; - const client = new datatransferserviceModule.v1.DataTransferServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectTransferConfigRunPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectTransferConfigRunPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectTransferConfigRunPath', () => { - const result = client.projectTransferConfigRunPath("projectValue", "transferConfigValue", "runValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectTransferConfigRunPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectTransferConfigRunName', () => { - const result = client.matchProjectFromProjectTransferConfigRunName(fakePath); - assert.strictEqual(result, "projectValue"); - 
assert((client.pathTemplates.projectTransferConfigRunPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchTransferConfigFromProjectTransferConfigRunName', () => { - const result = client.matchTransferConfigFromProjectTransferConfigRunName(fakePath); - assert.strictEqual(result, "transferConfigValue"); - assert((client.pathTemplates.projectTransferConfigRunPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchRunFromProjectTransferConfigRunName', () => { - const result = client.matchRunFromProjectTransferConfigRunName(fakePath); - assert.strictEqual(result, "runValue"); - assert((client.pathTemplates.projectTransferConfigRunPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); + describe('projectTransferConfigRun', () => { + const fakePath = '/rendered/path/projectTransferConfigRun'; + const expectedParameters = { + project: 'projectValue', + transfer_config: 'transferConfigValue', + run: 'runValue', + }; + const client = new datatransferserviceModule.v1.DataTransferServiceClient( + { + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + } + ); + client.initialize(); + client.pathTemplates.projectTransferConfigRunPathTemplate.render = sinon + .stub() + .returns(fakePath); + client.pathTemplates.projectTransferConfigRunPathTemplate.match = sinon + .stub() + .returns(expectedParameters); + + it('projectTransferConfigRunPath', () => { + const result = client.projectTransferConfigRunPath( + 'projectValue', + 'transferConfigValue', + 'runValue' + ); + assert.strictEqual(result, fakePath); + assert( + (client.pathTemplates.projectTransferConfigRunPathTemplate + .render as SinonStub) + .getCall(-1) + .calledWith(expectedParameters) + ); + }); + + it('matchProjectFromProjectTransferConfigRunName', () => { + const result = client.matchProjectFromProjectTransferConfigRunName( + fakePath + ); + assert.strictEqual(result, 'projectValue'); + 
assert( + (client.pathTemplates.projectTransferConfigRunPathTemplate + .match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchTransferConfigFromProjectTransferConfigRunName', () => { + const result = client.matchTransferConfigFromProjectTransferConfigRunName( + fakePath + ); + assert.strictEqual(result, 'transferConfigValue'); + assert( + (client.pathTemplates.projectTransferConfigRunPathTemplate + .match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); + + it('matchRunFromProjectTransferConfigRunName', () => { + const result = client.matchRunFromProjectTransferConfigRunName( + fakePath + ); + assert.strictEqual(result, 'runValue'); + assert( + (client.pathTemplates.projectTransferConfigRunPathTemplate + .match as SinonStub) + .getCall(-1) + .calledWith(fakePath) + ); + }); }); + }); }); diff --git a/packages/google-cloud-bigquery-datatransfer/webpack.config.js b/packages/google-cloud-bigquery-datatransfer/webpack.config.js index 0e5d2cb8b90..73a0b05880f 100644 --- a/packages/google-cloud-bigquery-datatransfer/webpack.config.js +++ b/packages/google-cloud-bigquery-datatransfer/webpack.config.js @@ -36,27 +36,27 @@ module.exports = { { test: /\.tsx?$/, use: 'ts-loader', - exclude: /node_modules/ + exclude: /node_modules/, }, { test: /node_modules[\\/]@grpc[\\/]grpc-js/, - use: 'null-loader' + use: 'null-loader', }, { test: /node_modules[\\/]grpc/, - use: 'null-loader' + use: 'null-loader', }, { test: /node_modules[\\/]retry-request/, - use: 'null-loader' + use: 'null-loader', }, { test: /node_modules[\\/]https?-proxy-agent/, - use: 'null-loader' + use: 'null-loader', }, { test: /node_modules[\\/]gtoken/, - use: 'null-loader' + use: 'null-loader', }, ], },