| // Copyright 2021 Google LLC |
| // |
| // Use of this source code is governed by a BSD-style |
| // license that can be found in the LICENSE file or at |
| // https://developers.google.com/open-source/licenses/bsd |
| |
| // This is a generated file (see the discoveryapis_generator project). |
| |
| // ignore_for_file: camel_case_types |
| // ignore_for_file: comment_references |
| // ignore_for_file: deprecated_member_use_from_same_package |
| // ignore_for_file: doc_directive_unknown |
| // ignore_for_file: lines_longer_than_80_chars |
| // ignore_for_file: non_constant_identifier_names |
| // ignore_for_file: prefer_interpolation_to_compose_strings |
| // ignore_for_file: unintended_html_in_doc_comment |
| // ignore_for_file: unnecessary_brace_in_string_interps |
| // ignore_for_file: unnecessary_lambdas |
| // ignore_for_file: unnecessary_string_interpolations |
| |
| /// Datastream API - v1 |
| /// |
| /// For more information, see <https://cloud.google.com/datastream/> |
| /// |
| /// Create an instance of [DatastreamApi] to access these resources: |
| /// |
| /// - [ProjectsResource] |
| /// - [ProjectsLocationsResource] |
| /// - [ProjectsLocationsConnectionProfilesResource] |
| /// - [ProjectsLocationsOperationsResource] |
| /// - [ProjectsLocationsPrivateConnectionsResource] |
| /// - [ProjectsLocationsPrivateConnectionsRoutesResource] |
| /// - [ProjectsLocationsStreamsResource] |
| /// - [ProjectsLocationsStreamsObjectsResource] |
| library; |
| |
| import 'dart:async' as async; |
| import 'dart:convert' as convert; |
| import 'dart:core' as core; |
| |
| import 'package:_discoveryapis_commons/_discoveryapis_commons.dart' as commons; |
| import 'package:http/http.dart' as http; |
| |
| import '../shared.dart'; |
| import '../src/user_agent.dart'; |
| |
| export 'package:_discoveryapis_commons/_discoveryapis_commons.dart' |
| show ApiRequestError, DetailedApiRequestError; |
| |
class DatastreamApi {
  /// See, edit, configure, and delete your Google Cloud data and see the email
  /// address for your Google Account.
  static const cloudPlatformScope =
      'https://www.googleapis.com/auth/cloud-platform';

  final commons.ApiRequester _requester;

  /// Creates a client for the Datastream API.
  ///
  /// [client] performs the underlying HTTP requests. [rootUrl] and
  /// [servicePath] may be overridden to target a non-default endpoint.
  DatastreamApi(
    http.Client client, {
    core.String rootUrl = 'https://datastream.googleapis.com/',
    core.String servicePath = '',
  }) : _requester =
           commons.ApiRequester(client, rootUrl, servicePath, requestHeaders);

  /// The top-level projects resource of this API.
  ProjectsResource get projects => ProjectsResource(_requester);
}
| |
class ProjectsResource {
  final commons.ApiRequester _requester;

  ProjectsResource(commons.ApiRequester client) : _requester = client;

  /// The locations collection nested under a project.
  ProjectsLocationsResource get locations =>
      ProjectsLocationsResource(_requester);
}
| |
class ProjectsLocationsResource {
  final commons.ApiRequester _requester;

  ProjectsLocationsResource(commons.ApiRequester client) : _requester = client;

  ProjectsLocationsConnectionProfilesResource get connectionProfiles =>
      ProjectsLocationsConnectionProfilesResource(_requester);
  ProjectsLocationsOperationsResource get operations =>
      ProjectsLocationsOperationsResource(_requester);
  ProjectsLocationsPrivateConnectionsResource get privateConnections =>
      ProjectsLocationsPrivateConnectionsResource(_requester);
  ProjectsLocationsStreamsResource get streams =>
      ProjectsLocationsStreamsResource(_requester);

  /// The FetchStaticIps API call exposes the static IP addresses used by
  /// Datastream.
  ///
  /// Request parameters:
  ///
  /// [name] - Required. The resource name for the location for which static IPs
  /// should be returned. Must be in the format `projects / * /locations / * `.
  /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`.
  ///
  /// [pageSize] - Optional. Maximum number of Ips to return, will likely not be
  /// specified.
  ///
  /// [pageToken] - Optional. A page token, received from a previous
  /// `ListStaticIps` call. will likely not be specified.
  ///
  /// [$fields] - Selector specifying which fields to include in a partial
  /// response.
  ///
  /// Completes with a [FetchStaticIpsResponse].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
  /// error.
  ///
  /// If the used [http.Client] completes with an error when making a REST call,
  /// this method will complete with the same error.
  async.Future<FetchStaticIpsResponse> fetchStaticIps(
    core.String name, {
    core.int? pageSize,
    core.String? pageToken,
    core.String? $fields,
  }) async {
    // Only parameters the caller actually supplied become query entries.
    final queryParams_ = <core.String, core.List<core.String>>{
      if (pageSize != null) 'pageSize': ['$pageSize'],
      if (pageToken != null) 'pageToken': [pageToken],
      if ($fields != null) 'fields': [$fields],
    };

    final url_ = 'v1/${core.Uri.encodeFull(name)}:fetchStaticIps';

    final response_ = await _requester.request(
      url_,
      'GET',
      queryParams: queryParams_,
    );
    return FetchStaticIpsResponse.fromJson(
      response_ as core.Map<core.String, core.dynamic>,
    );
  }

  /// Gets information about a location.
  ///
  /// Request parameters:
  ///
  /// [name] - Resource name for the location.
  /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`.
  ///
  /// [$fields] - Selector specifying which fields to include in a partial
  /// response.
  ///
  /// Completes with a [Location].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
  /// error.
  ///
  /// If the used [http.Client] completes with an error when making a REST call,
  /// this method will complete with the same error.
  async.Future<Location> get(core.String name, {core.String? $fields}) async {
    final queryParams_ = <core.String, core.List<core.String>>{
      if ($fields != null) 'fields': [$fields],
    };

    final url_ = 'v1/${core.Uri.encodeFull(name)}';

    final response_ = await _requester.request(
      url_,
      'GET',
      queryParams: queryParams_,
    );
    return Location.fromJson(response_ as core.Map<core.String, core.dynamic>);
  }

  /// Lists information about the supported locations for this service.
  ///
  /// Request parameters:
  ///
  /// [name] - The resource that owns the locations collection, if applicable.
  /// Value must have pattern `^projects/\[^/\]+$`.
  ///
  /// [extraLocationTypes] - Optional. Do not use this field. It is unsupported
  /// and is ignored unless explicitly documented otherwise. This is primarily
  /// for internal usage.
  ///
  /// [filter] - A filter to narrow down results to a preferred subset. The
  /// filtering language accepts strings like `"displayName=tokyo"`, and is
  /// documented in more detail in \[AIP-160\](https://google.aip.dev/160).
  ///
  /// [pageSize] - The maximum number of results to return. If not set, the
  /// service selects a default.
  ///
  /// [pageToken] - A page token received from the `next_page_token` field in
  /// the response. Send that page token to receive the subsequent page.
  ///
  /// [$fields] - Selector specifying which fields to include in a partial
  /// response.
  ///
  /// Completes with a [ListLocationsResponse].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
  /// error.
  ///
  /// If the used [http.Client] completes with an error when making a REST call,
  /// this method will complete with the same error.
  async.Future<ListLocationsResponse> list(
    core.String name, {
    core.List<core.String>? extraLocationTypes,
    core.String? filter,
    core.int? pageSize,
    core.String? pageToken,
    core.String? $fields,
  }) async {
    // Only parameters the caller actually supplied become query entries.
    final queryParams_ = <core.String, core.List<core.String>>{
      if (extraLocationTypes != null) 'extraLocationTypes': extraLocationTypes,
      if (filter != null) 'filter': [filter],
      if (pageSize != null) 'pageSize': ['$pageSize'],
      if (pageToken != null) 'pageToken': [pageToken],
      if ($fields != null) 'fields': [$fields],
    };

    final url_ = 'v1/${core.Uri.encodeFull(name)}/locations';

    final response_ = await _requester.request(
      url_,
      'GET',
      queryParams: queryParams_,
    );
    return ListLocationsResponse.fromJson(
      response_ as core.Map<core.String, core.dynamic>,
    );
  }
}
| |
class ProjectsLocationsConnectionProfilesResource {
  final commons.ApiRequester _requester;

  ProjectsLocationsConnectionProfilesResource(commons.ApiRequester client)
      : _requester = client;

  /// Use this method to create a connection profile in a project and location.
  ///
  /// [request] - The metadata request object.
  ///
  /// Request parameters:
  ///
  /// [parent] - Required. The parent that owns the collection of
  /// ConnectionProfiles.
  /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`.
  ///
  /// [connectionProfileId] - Required. The connection profile identifier.
  ///
  /// [force] - Optional. Create the connection profile without validating it.
  ///
  /// [requestId] - Optional. A request ID to identify requests. Specify a
  /// unique request ID so that if you must retry your request, the server will
  /// know to ignore the request if it has already been completed. The server
  /// will guarantee that for at least 60 minutes since the first request. For
  /// example, consider a situation where you make an initial request and the
  /// request times out. If you make the request again with the same request ID,
  /// the server can check if original operation with the same request ID was
  /// received, and if so, will ignore the second request. This prevents clients
  /// from accidentally creating duplicate commitments. The request ID must be a
  /// valid UUID with the exception that zero UUID is not supported
  /// (00000000-0000-0000-0000-000000000000).
  ///
  /// [validateOnly] - Optional. Only validate the connection profile, but don't
  /// create any resources. The default is false.
  ///
  /// [$fields] - Selector specifying which fields to include in a partial
  /// response.
  ///
  /// Completes with a [Operation].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
  /// error.
  ///
  /// If the used [http.Client] completes with an error when making a REST call,
  /// this method will complete with the same error.
  async.Future<Operation> create(
    ConnectionProfile request,
    core.String parent, {
    core.String? connectionProfileId,
    core.bool? force,
    core.String? requestId,
    core.bool? validateOnly,
    core.String? $fields,
  }) async {
    final body_ = convert.json.encode(request);
    // Only parameters the caller actually supplied become query entries.
    final queryParams_ = <core.String, core.List<core.String>>{
      if (connectionProfileId != null)
        'connectionProfileId': [connectionProfileId],
      if (force != null) 'force': ['$force'],
      if (requestId != null) 'requestId': [requestId],
      if (validateOnly != null) 'validateOnly': ['$validateOnly'],
      if ($fields != null) 'fields': [$fields],
    };

    final url_ = 'v1/${core.Uri.encodeFull(parent)}/connectionProfiles';

    final response_ = await _requester.request(
      url_,
      'POST',
      body: body_,
      queryParams: queryParams_,
    );
    return Operation.fromJson(response_ as core.Map<core.String, core.dynamic>);
  }

  /// Use this method to delete a connection profile.
  ///
  /// Request parameters:
  ///
  /// [name] - Required. The name of the connection profile resource to delete.
  /// Value must have pattern
  /// `^projects/\[^/\]+/locations/\[^/\]+/connectionProfiles/\[^/\]+$`.
  ///
  /// [requestId] - Optional. A request ID to identify requests. Specify a
  /// unique request ID so that if you must retry your request, the server will
  /// know to ignore the request if it has already been completed. The server
  /// will guarantee that for at least 60 minutes after the first request. For
  /// example, consider a situation where you make an initial request and the
  /// request times out. If you make the request again with the same request ID,
  /// the server can check if original operation with the same request ID was
  /// received, and if so, will ignore the second request. This prevents clients
  /// from accidentally creating duplicate commitments. The request ID must be a
  /// valid UUID with the exception that zero UUID is not supported
  /// (00000000-0000-0000-0000-000000000000).
  ///
  /// [$fields] - Selector specifying which fields to include in a partial
  /// response.
  ///
  /// Completes with a [Operation].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
  /// error.
  ///
  /// If the used [http.Client] completes with an error when making a REST call,
  /// this method will complete with the same error.
  async.Future<Operation> delete(
    core.String name, {
    core.String? requestId,
    core.String? $fields,
  }) async {
    final queryParams_ = <core.String, core.List<core.String>>{
      if (requestId != null) 'requestId': [requestId],
      if ($fields != null) 'fields': [$fields],
    };

    final url_ = 'v1/${core.Uri.encodeFull(name)}';

    final response_ = await _requester.request(
      url_,
      'DELETE',
      queryParams: queryParams_,
    );
    return Operation.fromJson(response_ as core.Map<core.String, core.dynamic>);
  }

  /// Use this method to discover a connection profile.
  ///
  /// The discover API call exposes the data objects and metadata belonging to
  /// the profile. Typically, a request returns children data objects of a
  /// parent data object that's optionally supplied in the request.
  ///
  /// [request] - The metadata request object.
  ///
  /// Request parameters:
  ///
  /// [parent] - Required. The parent resource of the connection profile type.
  /// Must be in the format `projects / * /locations / * `.
  /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`.
  ///
  /// [$fields] - Selector specifying which fields to include in a partial
  /// response.
  ///
  /// Completes with a [DiscoverConnectionProfileResponse].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
  /// error.
  ///
  /// If the used [http.Client] completes with an error when making a REST call,
  /// this method will complete with the same error.
  async.Future<DiscoverConnectionProfileResponse> discover(
    DiscoverConnectionProfileRequest request,
    core.String parent, {
    core.String? $fields,
  }) async {
    final body_ = convert.json.encode(request);
    final queryParams_ = <core.String, core.List<core.String>>{
      if ($fields != null) 'fields': [$fields],
    };

    final url_ =
        'v1/${core.Uri.encodeFull(parent)}/connectionProfiles:discover';

    final response_ = await _requester.request(
      url_,
      'POST',
      body: body_,
      queryParams: queryParams_,
    );
    return DiscoverConnectionProfileResponse.fromJson(
      response_ as core.Map<core.String, core.dynamic>,
    );
  }

  /// Use this method to get details about a connection profile.
  ///
  /// Request parameters:
  ///
  /// [name] - Required. The name of the connection profile resource to get.
  /// Value must have pattern
  /// `^projects/\[^/\]+/locations/\[^/\]+/connectionProfiles/\[^/\]+$`.
  ///
  /// [$fields] - Selector specifying which fields to include in a partial
  /// response.
  ///
  /// Completes with a [ConnectionProfile].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
  /// error.
  ///
  /// If the used [http.Client] completes with an error when making a REST call,
  /// this method will complete with the same error.
  async.Future<ConnectionProfile> get(
    core.String name, {
    core.String? $fields,
  }) async {
    final queryParams_ = <core.String, core.List<core.String>>{
      if ($fields != null) 'fields': [$fields],
    };

    final url_ = 'v1/${core.Uri.encodeFull(name)}';

    final response_ = await _requester.request(
      url_,
      'GET',
      queryParams: queryParams_,
    );
    return ConnectionProfile.fromJson(
      response_ as core.Map<core.String, core.dynamic>,
    );
  }

  /// Use this method to list connection profiles created in a project and
  /// location.
  ///
  /// Request parameters:
  ///
  /// [parent] - Required. The parent that owns the collection of connection
  /// profiles.
  /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`.
  ///
  /// [filter] - Optional. Filter request.
  ///
  /// [orderBy] - Optional. Order by fields for the result.
  ///
  /// [pageSize] - Optional. Maximum number of connection profiles to return. If
  /// unspecified, at most 50 connection profiles will be returned. The maximum
  /// value is 1000; values above 1000 will be coerced to 1000.
  ///
  /// [pageToken] - Optional. Page token received from a previous
  /// `ListConnectionProfiles` call. Provide this to retrieve the subsequent
  /// page. When paginating, all other parameters provided to
  /// `ListConnectionProfiles` must match the call that provided the page token.
  ///
  /// [$fields] - Selector specifying which fields to include in a partial
  /// response.
  ///
  /// Completes with a [ListConnectionProfilesResponse].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
  /// error.
  ///
  /// If the used [http.Client] completes with an error when making a REST call,
  /// this method will complete with the same error.
  async.Future<ListConnectionProfilesResponse> list(
    core.String parent, {
    core.String? filter,
    core.String? orderBy,
    core.int? pageSize,
    core.String? pageToken,
    core.String? $fields,
  }) async {
    final queryParams_ = <core.String, core.List<core.String>>{
      if (filter != null) 'filter': [filter],
      if (orderBy != null) 'orderBy': [orderBy],
      if (pageSize != null) 'pageSize': ['$pageSize'],
      if (pageToken != null) 'pageToken': [pageToken],
      if ($fields != null) 'fields': [$fields],
    };

    final url_ = 'v1/${core.Uri.encodeFull(parent)}/connectionProfiles';

    final response_ = await _requester.request(
      url_,
      'GET',
      queryParams: queryParams_,
    );
    return ListConnectionProfilesResponse.fromJson(
      response_ as core.Map<core.String, core.dynamic>,
    );
  }

  /// Use this method to update the parameters of a connection profile.
  ///
  /// [request] - The metadata request object.
  ///
  /// Request parameters:
  ///
  /// [name] - Output only. Identifier. The resource's name.
  /// Value must have pattern
  /// `^projects/\[^/\]+/locations/\[^/\]+/connectionProfiles/\[^/\]+$`.
  ///
  /// [force] - Optional. Update the connection profile without validating it.
  ///
  /// [requestId] - Optional. A request ID to identify requests. Specify a
  /// unique request ID so that if you must retry your request, the server will
  /// know to ignore the request if it has already been completed. The server
  /// will guarantee that for at least 60 minutes since the first request. For
  /// example, consider a situation where you make an initial request and the
  /// request times out. If you make the request again with the same request ID,
  /// the server can check if original operation with the same request ID was
  /// received, and if so, will ignore the second request. This prevents clients
  /// from accidentally creating duplicate commitments. The request ID must be a
  /// valid UUID with the exception that zero UUID is not supported
  /// (00000000-0000-0000-0000-000000000000).
  ///
  /// [updateMask] - Optional. Field mask is used to specify the fields to be
  /// overwritten in the ConnectionProfile resource by the update. The fields
  /// specified in the update_mask are relative to the resource, not the full
  /// request. A field will be overwritten if it is in the mask. If the user
  /// does not provide a mask then all fields will be overwritten.
  ///
  /// [validateOnly] - Optional. Only validate the connection profile, but don't
  /// update any resources. The default is false.
  ///
  /// [$fields] - Selector specifying which fields to include in a partial
  /// response.
  ///
  /// Completes with a [Operation].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
  /// error.
  ///
  /// If the used [http.Client] completes with an error when making a REST call,
  /// this method will complete with the same error.
  async.Future<Operation> patch(
    ConnectionProfile request,
    core.String name, {
    core.bool? force,
    core.String? requestId,
    core.String? updateMask,
    core.bool? validateOnly,
    core.String? $fields,
  }) async {
    final body_ = convert.json.encode(request);
    final queryParams_ = <core.String, core.List<core.String>>{
      if (force != null) 'force': ['$force'],
      if (requestId != null) 'requestId': [requestId],
      if (updateMask != null) 'updateMask': [updateMask],
      if (validateOnly != null) 'validateOnly': ['$validateOnly'],
      if ($fields != null) 'fields': [$fields],
    };

    final url_ = 'v1/${core.Uri.encodeFull(name)}';

    final response_ = await _requester.request(
      url_,
      'PATCH',
      body: body_,
      queryParams: queryParams_,
    );
    return Operation.fromJson(response_ as core.Map<core.String, core.dynamic>);
  }
}
| |
class ProjectsLocationsOperationsResource {
  final commons.ApiRequester _requester;

  ProjectsLocationsOperationsResource(commons.ApiRequester client)
      : _requester = client;

  /// Starts asynchronous cancellation on a long-running operation.
  ///
  /// The server makes a best effort to cancel the operation, but success is not
  /// guaranteed. If the server doesn't support this method, it returns
  /// `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation
  /// or other methods to check whether the cancellation succeeded or whether
  /// the operation completed despite cancellation. On successful cancellation,
  /// the operation is not deleted; instead, it becomes an operation with an
  /// Operation.error value with a google.rpc.Status.code of `1`, corresponding
  /// to `Code.CANCELLED`.
  ///
  /// [request] - The metadata request object.
  ///
  /// Request parameters:
  ///
  /// [name] - The name of the operation resource to be cancelled.
  /// Value must have pattern
  /// `^projects/\[^/\]+/locations/\[^/\]+/operations/\[^/\]+$`.
  ///
  /// [$fields] - Selector specifying which fields to include in a partial
  /// response.
  ///
  /// Completes with a [Empty].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
  /// error.
  ///
  /// If the used [http.Client] completes with an error when making a REST call,
  /// this method will complete with the same error.
  async.Future<Empty> cancel(
    CancelOperationRequest request,
    core.String name, {
    core.String? $fields,
  }) async {
    final body_ = convert.json.encode(request);
    final queryParams_ = <core.String, core.List<core.String>>{
      if ($fields != null) 'fields': [$fields],
    };

    final url_ = 'v1/${core.Uri.encodeFull(name)}:cancel';

    final response_ = await _requester.request(
      url_,
      'POST',
      body: body_,
      queryParams: queryParams_,
    );
    return Empty.fromJson(response_ as core.Map<core.String, core.dynamic>);
  }

  /// Deletes a long-running operation.
  ///
  /// This method indicates that the client is no longer interested in the
  /// operation result. It does not cancel the operation. If the server doesn't
  /// support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
  ///
  /// Request parameters:
  ///
  /// [name] - The name of the operation resource to be deleted.
  /// Value must have pattern
  /// `^projects/\[^/\]+/locations/\[^/\]+/operations/\[^/\]+$`.
  ///
  /// [$fields] - Selector specifying which fields to include in a partial
  /// response.
  ///
  /// Completes with a [Empty].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
  /// error.
  ///
  /// If the used [http.Client] completes with an error when making a REST call,
  /// this method will complete with the same error.
  async.Future<Empty> delete(core.String name, {core.String? $fields}) async {
    final queryParams_ = <core.String, core.List<core.String>>{
      if ($fields != null) 'fields': [$fields],
    };

    final url_ = 'v1/${core.Uri.encodeFull(name)}';

    final response_ = await _requester.request(
      url_,
      'DELETE',
      queryParams: queryParams_,
    );
    return Empty.fromJson(response_ as core.Map<core.String, core.dynamic>);
  }

  /// Gets the latest state of a long-running operation.
  ///
  /// Clients can use this method to poll the operation result at intervals as
  /// recommended by the API service.
  ///
  /// Request parameters:
  ///
  /// [name] - The name of the operation resource.
  /// Value must have pattern
  /// `^projects/\[^/\]+/locations/\[^/\]+/operations/\[^/\]+$`.
  ///
  /// [$fields] - Selector specifying which fields to include in a partial
  /// response.
  ///
  /// Completes with a [Operation].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
  /// error.
  ///
  /// If the used [http.Client] completes with an error when making a REST call,
  /// this method will complete with the same error.
  async.Future<Operation> get(core.String name, {core.String? $fields}) async {
    final queryParams_ = <core.String, core.List<core.String>>{
      if ($fields != null) 'fields': [$fields],
    };

    final url_ = 'v1/${core.Uri.encodeFull(name)}';

    final response_ = await _requester.request(
      url_,
      'GET',
      queryParams: queryParams_,
    );
    return Operation.fromJson(response_ as core.Map<core.String, core.dynamic>);
  }

  /// Lists operations that match the specified filter in the request.
  ///
  /// If the server doesn't support this method, it returns `UNIMPLEMENTED`.
  ///
  /// Request parameters:
  ///
  /// [name] - The name of the operation's parent resource.
  /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`.
  ///
  /// [filter] - The standard list filter.
  ///
  /// [pageSize] - The standard list page size.
  ///
  /// [pageToken] - The standard list page token.
  ///
  /// [returnPartialSuccess] - When set to `true`, operations that are reachable
  /// are returned as normal, and those that are unreachable are returned in the
  /// ListOperationsResponse.unreachable field. This can only be `true` when
  /// reading across collections. For example, when `parent` is set to
  /// `"projects/example/locations/-"`. This field is not supported by default
  /// and will result in an `UNIMPLEMENTED` error if set unless explicitly
  /// documented otherwise in service or product specific documentation.
  ///
  /// [$fields] - Selector specifying which fields to include in a partial
  /// response.
  ///
  /// Completes with a [ListOperationsResponse].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
  /// error.
  ///
  /// If the used [http.Client] completes with an error when making a REST call,
  /// this method will complete with the same error.
  async.Future<ListOperationsResponse> list(
    core.String name, {
    core.String? filter,
    core.int? pageSize,
    core.String? pageToken,
    core.bool? returnPartialSuccess,
    core.String? $fields,
  }) async {
    // Only parameters the caller actually supplied become query entries.
    final queryParams_ = <core.String, core.List<core.String>>{
      if (filter != null) 'filter': [filter],
      if (pageSize != null) 'pageSize': ['$pageSize'],
      if (pageToken != null) 'pageToken': [pageToken],
      if (returnPartialSuccess != null)
        'returnPartialSuccess': ['$returnPartialSuccess'],
      if ($fields != null) 'fields': [$fields],
    };

    final url_ = 'v1/${core.Uri.encodeFull(name)}/operations';

    final response_ = await _requester.request(
      url_,
      'GET',
      queryParams: queryParams_,
    );
    return ListOperationsResponse.fromJson(
      response_ as core.Map<core.String, core.dynamic>,
    );
  }
}
| |
| class ProjectsLocationsPrivateConnectionsResource { |
| final commons.ApiRequester _requester; |
| |
| ProjectsLocationsPrivateConnectionsRoutesResource get routes => |
| ProjectsLocationsPrivateConnectionsRoutesResource(_requester); |
| |
| ProjectsLocationsPrivateConnectionsResource(commons.ApiRequester client) |
| : _requester = client; |
| |
| /// Use this method to create a private connectivity configuration. |
| /// |
| /// [request] - The metadata request object. |
| /// |
| /// Request parameters: |
| /// |
| /// [parent] - Required. The parent that owns the collection of |
| /// PrivateConnections. |
| /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. |
| /// |
| /// [force] - Optional. If set to true, will skip validations. |
| /// |
| /// [privateConnectionId] - Required. The private connectivity identifier. |
| /// |
| /// [requestId] - Optional. A request ID to identify requests. Specify a |
| /// unique request ID so that if you must retry your request, the server will |
| /// know to ignore the request if it has already been completed. The server |
| /// will guarantee that for at least 60 minutes since the first request. For |
| /// example, consider a situation where you make an initial request and the |
| /// request times out. If you make the request again with the same request ID, |
| /// the server can check if original operation with the same request ID was |
| /// received, and if so, will ignore the second request. This prevents clients |
| /// from accidentally creating duplicate commitments. The request ID must be a |
| /// valid UUID with the exception that zero UUID is not supported |
| /// (00000000-0000-0000-0000-000000000000). |
| /// |
| /// [validateOnly] - Optional. When supplied with PSC Interface config, will |
| /// get/create the tenant project required for the customer to allow list and |
| /// won't actually create the private connection. |
| /// |
| /// [$fields] - Selector specifying which fields to include in a partial |
| /// response. |
| /// |
| /// Completes with a [Operation]. |
| /// |
| /// Completes with a [commons.ApiRequestError] if the API endpoint returned an |
| /// error. |
| /// |
| /// If the used [http.Client] completes with an error when making a REST call, |
| /// this method will complete with the same error. |
| async.Future<Operation> create( |
| PrivateConnection request, |
| core.String parent, { |
| core.bool? force, |
| core.String? privateConnectionId, |
| core.String? requestId, |
| core.bool? validateOnly, |
| core.String? $fields, |
| }) async { |
| final body_ = convert.json.encode(request); |
| final queryParams_ = <core.String, core.List<core.String>>{ |
| 'force': ?force == null ? null : ['${force}'], |
| 'privateConnectionId': ?privateConnectionId == null |
| ? null |
| : [privateConnectionId], |
| 'requestId': ?requestId == null ? null : [requestId], |
| 'validateOnly': ?validateOnly == null ? null : ['${validateOnly}'], |
| 'fields': ?$fields == null ? null : [$fields], |
| }; |
| |
| final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/privateConnections'; |
| |
| final response_ = await _requester.request( |
| url_, |
| 'POST', |
| body: body_, |
| queryParams: queryParams_, |
| ); |
| return Operation.fromJson(response_ as core.Map<core.String, core.dynamic>); |
| } |
| |
| /// Use this method to delete a private connectivity configuration. |
| /// |
| /// Request parameters: |
| /// |
| /// [name] - Required. The name of the private connectivity configuration to |
| /// delete. |
| /// Value must have pattern |
| /// `^projects/\[^/\]+/locations/\[^/\]+/privateConnections/\[^/\]+$`. |
| /// |
| /// [force] - Optional. If set to true, any child routes that belong to this |
| /// PrivateConnection will also be deleted. |
| /// |
| /// [requestId] - Optional. A request ID to identify requests. Specify a |
| /// unique request ID so that if you must retry your request, the server will |
| /// know to ignore the request if it has already been completed. The server |
| /// will guarantee that for at least 60 minutes after the first request. For |
| /// example, consider a situation where you make an initial request and the |
| /// request times out. If you make the request again with the same request ID, |
| /// the server can check if original operation with the same request ID was |
| /// received, and if so, will ignore the second request. This prevents clients |
| /// from accidentally creating duplicate commitments. The request ID must be a |
| /// valid UUID with the exception that zero UUID is not supported |
| /// (00000000-0000-0000-0000-000000000000). |
| /// |
| /// [$fields] - Selector specifying which fields to include in a partial |
| /// response. |
| /// |
| /// Completes with a [Operation]. |
| /// |
| /// Completes with a [commons.ApiRequestError] if the API endpoint returned an |
| /// error. |
| /// |
| /// If the used [http.Client] completes with an error when making a REST call, |
| /// this method will complete with the same error. |
| async.Future<Operation> delete( |
| core.String name, { |
| core.bool? force, |
| core.String? requestId, |
| core.String? $fields, |
| }) async { |
| final queryParams_ = <core.String, core.List<core.String>>{ |
| 'force': ?force == null ? null : ['${force}'], |
| 'requestId': ?requestId == null ? null : [requestId], |
| 'fields': ?$fields == null ? null : [$fields], |
| }; |
| |
| final url_ = 'v1/' + core.Uri.encodeFull('$name'); |
| |
| final response_ = await _requester.request( |
| url_, |
| 'DELETE', |
| queryParams: queryParams_, |
| ); |
| return Operation.fromJson(response_ as core.Map<core.String, core.dynamic>); |
| } |
| |
| /// Use this method to get details about a private connectivity configuration. |
| /// |
| /// Request parameters: |
| /// |
| /// [name] - Required. The name of the private connectivity configuration to |
| /// get. |
| /// Value must have pattern |
| /// `^projects/\[^/\]+/locations/\[^/\]+/privateConnections/\[^/\]+$`. |
| /// |
| /// [$fields] - Selector specifying which fields to include in a partial |
| /// response. |
| /// |
| /// Completes with a [PrivateConnection]. |
| /// |
| /// Completes with a [commons.ApiRequestError] if the API endpoint returned an |
| /// error. |
| /// |
| /// If the used [http.Client] completes with an error when making a REST call, |
| /// this method will complete with the same error. |
| async.Future<PrivateConnection> get( |
| core.String name, { |
| core.String? $fields, |
| }) async { |
| final queryParams_ = <core.String, core.List<core.String>>{ |
| 'fields': ?$fields == null ? null : [$fields], |
| }; |
| |
| final url_ = 'v1/' + core.Uri.encodeFull('$name'); |
| |
| final response_ = await _requester.request( |
| url_, |
| 'GET', |
| queryParams: queryParams_, |
| ); |
| return PrivateConnection.fromJson( |
| response_ as core.Map<core.String, core.dynamic>, |
| ); |
| } |
| |
| /// Use this method to list private connectivity configurations in a project |
| /// and location. |
| /// |
| /// Request parameters: |
| /// |
| /// [parent] - Required. The parent that owns the collection of private |
| /// connectivity configurations. |
| /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`. |
| /// |
| /// [filter] - Optional. Filter request. |
| /// |
| /// [orderBy] - Optional. Order by fields for the result. |
| /// |
  /// [pageSize] - Maximum number of private connectivity configurations to
  /// return. If unspecified, at most 50 private connectivity configurations
  /// will be returned. The maximum value is 1000; values above 1000 will be
  /// coerced to 1000.
| /// |
| /// [pageToken] - Optional. Page token received from a previous |
| /// `ListPrivateConnections` call. Provide this to retrieve the subsequent |
| /// page. When paginating, all other parameters provided to |
| /// `ListPrivateConnections` must match the call that provided the page token. |
| /// |
| /// [$fields] - Selector specifying which fields to include in a partial |
| /// response. |
| /// |
| /// Completes with a [ListPrivateConnectionsResponse]. |
| /// |
| /// Completes with a [commons.ApiRequestError] if the API endpoint returned an |
| /// error. |
| /// |
| /// If the used [http.Client] completes with an error when making a REST call, |
| /// this method will complete with the same error. |
| async.Future<ListPrivateConnectionsResponse> list( |
| core.String parent, { |
| core.String? filter, |
| core.String? orderBy, |
| core.int? pageSize, |
| core.String? pageToken, |
| core.String? $fields, |
| }) async { |
| final queryParams_ = <core.String, core.List<core.String>>{ |
| 'filter': ?filter == null ? null : [filter], |
| 'orderBy': ?orderBy == null ? null : [orderBy], |
| 'pageSize': ?pageSize == null ? null : ['${pageSize}'], |
| 'pageToken': ?pageToken == null ? null : [pageToken], |
| 'fields': ?$fields == null ? null : [$fields], |
| }; |
| |
| final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/privateConnections'; |
| |
| final response_ = await _requester.request( |
| url_, |
| 'GET', |
| queryParams: queryParams_, |
| ); |
| return ListPrivateConnectionsResponse.fromJson( |
| response_ as core.Map<core.String, core.dynamic>, |
| ); |
| } |
| } |
| |
class ProjectsLocationsPrivateConnectionsRoutesResource {
  final commons.ApiRequester _requester;

  ProjectsLocationsPrivateConnectionsRoutesResource(commons.ApiRequester client)
    : _requester = client;

  /// Use this method to create a route for a private connectivity configuration
  /// in a project and location.
  ///
  /// [request] - The metadata request object.
  ///
  /// Request parameters:
  ///
  /// [parent] - Required. The parent that owns the collection of Routes.
  /// Value must have pattern
  /// `^projects/\[^/\]+/locations/\[^/\]+/privateConnections/\[^/\]+$`.
  ///
  /// [requestId] - Optional. A request ID to identify requests. Specify a
  /// unique request ID so that if you must retry your request, the server will
  /// know to ignore the request if it has already been completed. The server
  /// will guarantee that for at least 60 minutes since the first request. For
  /// example, consider a situation where you make an initial request and the
  /// request times out. If you make the request again with the same request ID,
  /// the server can check if original operation with the same request ID was
  /// received, and if so, will ignore the second request. This prevents clients
  /// from accidentally creating duplicate commitments. The request ID must be a
  /// valid UUID with the exception that zero UUID is not supported
  /// (00000000-0000-0000-0000-000000000000).
  ///
  /// [routeId] - Required. The Route identifier.
  ///
  /// [$fields] - Selector specifying which fields to include in a partial
  /// response.
  ///
  /// Completes with a [Operation].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
  /// error.
  ///
  /// If the used [http.Client] completes with an error when making a REST call,
  /// this method will complete with the same error.
  async.Future<Operation> create(
    Route request,
    core.String parent, {
    core.String? requestId,
    core.String? routeId,
    core.String? $fields,
  }) async {
    // The Route resource travels as the JSON-encoded POST body.
    final body_ = convert.json.encode(request);
    // Only parameters the caller actually supplied are sent on the wire.
    final queryParams_ = <core.String, core.List<core.String>>{
      if (requestId != null) 'requestId': [requestId],
      if (routeId != null) 'routeId': [routeId],
      if ($fields != null) 'fields': [$fields],
    };

    final url_ = 'v1/${core.Uri.encodeFull(parent)}/routes';

    final response_ = await _requester.request(
      url_,
      'POST',
      body: body_,
      queryParams: queryParams_,
    );
    return Operation.fromJson(response_ as core.Map<core.String, core.dynamic>);
  }

  /// Use this method to delete a route.
  ///
  /// Request parameters:
  ///
  /// [name] - Required. The name of the Route resource to delete.
  /// Value must have pattern
  /// `^projects/\[^/\]+/locations/\[^/\]+/privateConnections/\[^/\]+/routes/\[^/\]+$`.
  ///
  /// [requestId] - Optional. A request ID to identify requests. Specify a
  /// unique request ID so that if you must retry your request, the server will
  /// know to ignore the request if it has already been completed. The server
  /// will guarantee that for at least 60 minutes after the first request. For
  /// example, consider a situation where you make an initial request and the
  /// request times out. If you make the request again with the same request ID,
  /// the server can check if original operation with the same request ID was
  /// received, and if so, will ignore the second request. This prevents clients
  /// from accidentally creating duplicate commitments. The request ID must be a
  /// valid UUID with the exception that zero UUID is not supported
  /// (00000000-0000-0000-0000-000000000000).
  ///
  /// [$fields] - Selector specifying which fields to include in a partial
  /// response.
  ///
  /// Completes with a [Operation].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
  /// error.
  ///
  /// If the used [http.Client] completes with an error when making a REST call,
  /// this method will complete with the same error.
  async.Future<Operation> delete(
    core.String name, {
    core.String? requestId,
    core.String? $fields,
  }) async {
    final queryParams_ = <core.String, core.List<core.String>>{
      if (requestId != null) 'requestId': [requestId],
      if ($fields != null) 'fields': [$fields],
    };

    final url_ = 'v1/${core.Uri.encodeFull(name)}';

    final response_ = await _requester.request(
      url_,
      'DELETE',
      queryParams: queryParams_,
    );
    return Operation.fromJson(response_ as core.Map<core.String, core.dynamic>);
  }

  /// Use this method to get details about a route.
  ///
  /// Request parameters:
  ///
  /// [name] - Required. The name of the Route resource to get.
  /// Value must have pattern
  /// `^projects/\[^/\]+/locations/\[^/\]+/privateConnections/\[^/\]+/routes/\[^/\]+$`.
  ///
  /// [$fields] - Selector specifying which fields to include in a partial
  /// response.
  ///
  /// Completes with a [Route].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
  /// error.
  ///
  /// If the used [http.Client] completes with an error when making a REST call,
  /// this method will complete with the same error.
  async.Future<Route> get(core.String name, {core.String? $fields}) async {
    final queryParams_ = <core.String, core.List<core.String>>{
      if ($fields != null) 'fields': [$fields],
    };

    final url_ = 'v1/${core.Uri.encodeFull(name)}';

    final response_ = await _requester.request(
      url_,
      'GET',
      queryParams: queryParams_,
    );
    return Route.fromJson(response_ as core.Map<core.String, core.dynamic>);
  }

  /// Use this method to list routes created for a private connectivity
  /// configuration in a project and location.
  ///
  /// Request parameters:
  ///
  /// [parent] - Required. The parent that owns the collection of Routes.
  /// Value must have pattern
  /// `^projects/\[^/\]+/locations/\[^/\]+/privateConnections/\[^/\]+$`.
  ///
  /// [filter] - Optional. Filter request.
  ///
  /// [orderBy] - Optional. Order by fields for the result.
  ///
  /// [pageSize] - Optional. Maximum number of Routes to return. The service may
  /// return fewer than this value. If unspecified, at most 50 Routes will be
  /// returned. The maximum value is 1000; values above 1000 will be coerced to
  /// 1000.
  ///
  /// [pageToken] - Optional. Page token received from a previous `ListRoutes`
  /// call. Provide this to retrieve the subsequent page. When paginating, all
  /// other parameters provided to `ListRoutes` must match the call that
  /// provided the page token.
  ///
  /// [$fields] - Selector specifying which fields to include in a partial
  /// response.
  ///
  /// Completes with a [ListRoutesResponse].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
  /// error.
  ///
  /// If the used [http.Client] completes with an error when making a REST call,
  /// this method will complete with the same error.
  async.Future<ListRoutesResponse> list(
    core.String parent, {
    core.String? filter,
    core.String? orderBy,
    core.int? pageSize,
    core.String? pageToken,
    core.String? $fields,
  }) async {
    // Only parameters the caller actually supplied are sent on the wire.
    final queryParams_ = <core.String, core.List<core.String>>{
      if (filter != null) 'filter': [filter],
      if (orderBy != null) 'orderBy': [orderBy],
      if (pageSize != null) 'pageSize': ['$pageSize'],
      if (pageToken != null) 'pageToken': [pageToken],
      if ($fields != null) 'fields': [$fields],
    };

    final url_ = 'v1/${core.Uri.encodeFull(parent)}/routes';

    final response_ = await _requester.request(
      url_,
      'GET',
      queryParams: queryParams_,
    );
    return ListRoutesResponse.fromJson(
      response_ as core.Map<core.String, core.dynamic>,
    );
  }
}
| |
class ProjectsLocationsStreamsResource {
  final commons.ApiRequester _requester;

  /// Access to the per-stream objects sub-resource.
  ProjectsLocationsStreamsObjectsResource get objects =>
      ProjectsLocationsStreamsObjectsResource(_requester);

  ProjectsLocationsStreamsResource(commons.ApiRequester client)
    : _requester = client;

  /// Use this method to create a stream.
  ///
  /// [request] - The metadata request object.
  ///
  /// Request parameters:
  ///
  /// [parent] - Required. The parent that owns the collection of streams.
  /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`.
  ///
  /// [force] - Optional. Create the stream without validating it.
  ///
  /// [requestId] - Optional. A request ID to identify requests. Specify a
  /// unique request ID so that if you must retry your request, the server will
  /// know to ignore the request if it has already been completed. The server
  /// will guarantee that for at least 60 minutes since the first request. For
  /// example, consider a situation where you make an initial request and the
  /// request times out. If you make the request again with the same request ID,
  /// the server can check if original operation with the same request ID was
  /// received, and if so, will ignore the second request. This prevents clients
  /// from accidentally creating duplicate commitments. The request ID must be a
  /// valid UUID with the exception that zero UUID is not supported
  /// (00000000-0000-0000-0000-000000000000).
  ///
  /// [streamId] - Required. The stream identifier.
  ///
  /// [validateOnly] - Optional. Only validate the stream, but don't create any
  /// resources. The default is false.
  ///
  /// [$fields] - Selector specifying which fields to include in a partial
  /// response.
  ///
  /// Completes with a [Operation].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
  /// error.
  ///
  /// If the used [http.Client] completes with an error when making a REST call,
  /// this method will complete with the same error.
  async.Future<Operation> create(
    Stream request,
    core.String parent, {
    core.bool? force,
    core.String? requestId,
    core.String? streamId,
    core.bool? validateOnly,
    core.String? $fields,
  }) async {
    // The Stream resource travels as the JSON-encoded POST body.
    final body_ = convert.json.encode(request);
    // Only parameters the caller actually supplied are sent on the wire.
    final queryParams_ = <core.String, core.List<core.String>>{
      if (force != null) 'force': ['$force'],
      if (requestId != null) 'requestId': [requestId],
      if (streamId != null) 'streamId': [streamId],
      if (validateOnly != null) 'validateOnly': ['$validateOnly'],
      if ($fields != null) 'fields': [$fields],
    };

    final url_ = 'v1/${core.Uri.encodeFull(parent)}/streams';

    final response_ = await _requester.request(
      url_,
      'POST',
      body: body_,
      queryParams: queryParams_,
    );
    return Operation.fromJson(response_ as core.Map<core.String, core.dynamic>);
  }

  /// Use this method to delete a stream.
  ///
  /// Request parameters:
  ///
  /// [name] - Required. The name of the stream resource to delete.
  /// Value must have pattern
  /// `^projects/\[^/\]+/locations/\[^/\]+/streams/\[^/\]+$`.
  ///
  /// [requestId] - Optional. A request ID to identify requests. Specify a
  /// unique request ID so that if you must retry your request, the server will
  /// know to ignore the request if it has already been completed. The server
  /// will guarantee that for at least 60 minutes after the first request. For
  /// example, consider a situation where you make an initial request and the
  /// request times out. If you make the request again with the same request ID,
  /// the server can check if original operation with the same request ID was
  /// received, and if so, will ignore the second request. This prevents clients
  /// from accidentally creating duplicate commitments. The request ID must be a
  /// valid UUID with the exception that zero UUID is not supported
  /// (00000000-0000-0000-0000-000000000000).
  ///
  /// [$fields] - Selector specifying which fields to include in a partial
  /// response.
  ///
  /// Completes with a [Operation].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
  /// error.
  ///
  /// If the used [http.Client] completes with an error when making a REST call,
  /// this method will complete with the same error.
  async.Future<Operation> delete(
    core.String name, {
    core.String? requestId,
    core.String? $fields,
  }) async {
    final queryParams_ = <core.String, core.List<core.String>>{
      if (requestId != null) 'requestId': [requestId],
      if ($fields != null) 'fields': [$fields],
    };

    final url_ = 'v1/${core.Uri.encodeFull(name)}';

    final response_ = await _requester.request(
      url_,
      'DELETE',
      queryParams: queryParams_,
    );
    return Operation.fromJson(response_ as core.Map<core.String, core.dynamic>);
  }

  /// Use this method to get details about a stream.
  ///
  /// Request parameters:
  ///
  /// [name] - Required. The name of the stream resource to get.
  /// Value must have pattern
  /// `^projects/\[^/\]+/locations/\[^/\]+/streams/\[^/\]+$`.
  ///
  /// [$fields] - Selector specifying which fields to include in a partial
  /// response.
  ///
  /// Completes with a [Stream].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
  /// error.
  ///
  /// If the used [http.Client] completes with an error when making a REST call,
  /// this method will complete with the same error.
  async.Future<Stream> get(core.String name, {core.String? $fields}) async {
    final queryParams_ = <core.String, core.List<core.String>>{
      if ($fields != null) 'fields': [$fields],
    };

    final url_ = 'v1/${core.Uri.encodeFull(name)}';

    final response_ = await _requester.request(
      url_,
      'GET',
      queryParams: queryParams_,
    );
    return Stream.fromJson(response_ as core.Map<core.String, core.dynamic>);
  }

  /// Use this method to list streams in a project and location.
  ///
  /// Request parameters:
  ///
  /// [parent] - Required. The parent that owns the collection of streams.
  /// Value must have pattern `^projects/\[^/\]+/locations/\[^/\]+$`.
  ///
  /// [filter] - Optional. Filter request.
  ///
  /// [orderBy] - Optional. Order by fields for the result.
  ///
  /// [pageSize] - Optional. Maximum number of streams to return. If
  /// unspecified, at most 50 streams will be returned. The maximum value is
  /// 1000; values above 1000 will be coerced to 1000.
  ///
  /// [pageToken] - Optional. Page token received from a previous `ListStreams`
  /// call. Provide this to retrieve the subsequent page. When paginating, all
  /// other parameters provided to `ListStreams` must match the call that
  /// provided the page token.
  ///
  /// [$fields] - Selector specifying which fields to include in a partial
  /// response.
  ///
  /// Completes with a [ListStreamsResponse].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
  /// error.
  ///
  /// If the used [http.Client] completes with an error when making a REST call,
  /// this method will complete with the same error.
  async.Future<ListStreamsResponse> list(
    core.String parent, {
    core.String? filter,
    core.String? orderBy,
    core.int? pageSize,
    core.String? pageToken,
    core.String? $fields,
  }) async {
    // Only parameters the caller actually supplied are sent on the wire.
    final queryParams_ = <core.String, core.List<core.String>>{
      if (filter != null) 'filter': [filter],
      if (orderBy != null) 'orderBy': [orderBy],
      if (pageSize != null) 'pageSize': ['$pageSize'],
      if (pageToken != null) 'pageToken': [pageToken],
      if ($fields != null) 'fields': [$fields],
    };

    final url_ = 'v1/${core.Uri.encodeFull(parent)}/streams';

    final response_ = await _requester.request(
      url_,
      'GET',
      queryParams: queryParams_,
    );
    return ListStreamsResponse.fromJson(
      response_ as core.Map<core.String, core.dynamic>,
    );
  }

  /// Use this method to update the configuration of a stream.
  ///
  /// [request] - The metadata request object.
  ///
  /// Request parameters:
  ///
  /// [name] - Output only. Identifier. The stream's name.
  /// Value must have pattern
  /// `^projects/\[^/\]+/locations/\[^/\]+/streams/\[^/\]+$`.
  ///
  /// [force] - Optional. Update the stream without validating it.
  ///
  /// [requestId] - Optional. A request ID to identify requests. Specify a
  /// unique request ID so that if you must retry your request, the server will
  /// know to ignore the request if it has already been completed. The server
  /// will guarantee that for at least 60 minutes since the first request. For
  /// example, consider a situation where you make an initial request and the
  /// request times out. If you make the request again with the same request ID,
  /// the server can check if original operation with the same request ID was
  /// received, and if so, will ignore the second request. This prevents clients
  /// from accidentally creating duplicate commitments. The request ID must be a
  /// valid UUID with the exception that zero UUID is not supported
  /// (00000000-0000-0000-0000-000000000000).
  ///
  /// [updateMask] - Optional. Field mask is used to specify the fields to be
  /// overwritten in the stream resource by the update. The fields specified in
  /// the update_mask are relative to the resource, not the full request. A
  /// field will be overwritten if it is in the mask. If the user does not
  /// provide a mask then all fields will be overwritten.
  ///
  /// [validateOnly] - Optional. Only validate the stream with the changes,
  /// without actually updating it. The default is false.
  ///
  /// [$fields] - Selector specifying which fields to include in a partial
  /// response.
  ///
  /// Completes with a [Operation].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
  /// error.
  ///
  /// If the used [http.Client] completes with an error when making a REST call,
  /// this method will complete with the same error.
  async.Future<Operation> patch(
    Stream request,
    core.String name, {
    core.bool? force,
    core.String? requestId,
    core.String? updateMask,
    core.bool? validateOnly,
    core.String? $fields,
  }) async {
    // The updated Stream resource travels as the JSON-encoded PATCH body.
    final body_ = convert.json.encode(request);
    // Only parameters the caller actually supplied are sent on the wire.
    final queryParams_ = <core.String, core.List<core.String>>{
      if (force != null) 'force': ['$force'],
      if (requestId != null) 'requestId': [requestId],
      if (updateMask != null) 'updateMask': [updateMask],
      if (validateOnly != null) 'validateOnly': ['$validateOnly'],
      if ($fields != null) 'fields': [$fields],
    };

    final url_ = 'v1/${core.Uri.encodeFull(name)}';

    final response_ = await _requester.request(
      url_,
      'PATCH',
      body: body_,
      queryParams: queryParams_,
    );
    return Operation.fromJson(response_ as core.Map<core.String, core.dynamic>);
  }

  /// Use this method to start, resume or recover a stream with a non default
  /// CDC strategy.
  ///
  /// [request] - The metadata request object.
  ///
  /// Request parameters:
  ///
  /// [name] - Required. Name of the stream resource to start, in the format:
  /// projects/{project_id}/locations/{location}/streams/{stream_name}
  /// Value must have pattern
  /// `^projects/\[^/\]+/locations/\[^/\]+/streams/\[^/\]+$`.
  ///
  /// [$fields] - Selector specifying which fields to include in a partial
  /// response.
  ///
  /// Completes with a [Operation].
  ///
  /// Completes with a [commons.ApiRequestError] if the API endpoint returned an
  /// error.
  ///
  /// If the used [http.Client] completes with an error when making a REST call,
  /// this method will complete with the same error.
  async.Future<Operation> run(
    RunStreamRequest request,
    core.String name, {
    core.String? $fields,
  }) async {
    final body_ = convert.json.encode(request);
    final queryParams_ = <core.String, core.List<core.String>>{
      if ($fields != null) 'fields': [$fields],
    };

    // Custom method: the `:run` verb is appended to the resource name.
    final url_ = 'v1/${core.Uri.encodeFull(name)}:run';

    final response_ = await _requester.request(
      url_,
      'POST',
      body: body_,
      queryParams: queryParams_,
    );
    return Operation.fromJson(response_ as core.Map<core.String, core.dynamic>);
  }
}
| |
| class ProjectsLocationsStreamsObjectsResource { |
| final commons.ApiRequester _requester; |
| |
| ProjectsLocationsStreamsObjectsResource(commons.ApiRequester client) |
| : _requester = client; |
| |
| /// Use this method to get details about a stream object. |
| /// |
| /// Request parameters: |
| /// |
| /// [name] - Required. The name of the stream object resource to get. |
| /// Value must have pattern |
| /// `^projects/\[^/\]+/locations/\[^/\]+/streams/\[^/\]+/objects/\[^/\]+$`. |
| /// |
| /// [$fields] - Selector specifying which fields to include in a partial |
| /// response. |
| /// |
| /// Completes with a [StreamObject]. |
| /// |
| /// Completes with a [commons.ApiRequestError] if the API endpoint returned an |
| /// error. |
| /// |
| /// If the used [http.Client] completes with an error when making a REST call, |
| /// this method will complete with the same error. |
| async.Future<StreamObject> get( |
| core.String name, { |
| core.String? $fields, |
| }) async { |
| final queryParams_ = <core.String, core.List<core.String>>{ |
| 'fields': ?$fields == null ? null : [$fields], |
| }; |
| |
| final url_ = 'v1/' + core.Uri.encodeFull('$name'); |
| |
| final response_ = await _requester.request( |
| url_, |
| 'GET', |
| queryParams: queryParams_, |
| ); |
| return StreamObject.fromJson( |
| response_ as core.Map<core.String, core.dynamic>, |
| ); |
| } |
| |
| /// Use this method to list the objects of a specific stream. |
| /// |
| /// Request parameters: |
| /// |
| /// [parent] - Required. The parent stream that owns the collection of |
| /// objects. |
| /// Value must have pattern |
| /// `^projects/\[^/\]+/locations/\[^/\]+/streams/\[^/\]+$`. |
| /// |
| /// [pageSize] - Optional. Maximum number of objects to return. Default is 50. |
| /// The maximum value is 1000; values above 1000 will be coerced to 1000. |
| /// |
| /// [pageToken] - Optional. Page token received from a previous |
| /// `ListStreamObjectsRequest` call. Provide this to retrieve the subsequent |
| /// page. When paginating, all other parameters provided to |
| /// `ListStreamObjectsRequest` must match the call that provided the page |
| /// token. |
| /// |
| /// [$fields] - Selector specifying which fields to include in a partial |
| /// response. |
| /// |
| /// Completes with a [ListStreamObjectsResponse]. |
| /// |
| /// Completes with a [commons.ApiRequestError] if the API endpoint returned an |
| /// error. |
| /// |
| /// If the used [http.Client] completes with an error when making a REST call, |
| /// this method will complete with the same error. |
| async.Future<ListStreamObjectsResponse> list( |
| core.String parent, { |
| core.int? pageSize, |
| core.String? pageToken, |
| core.String? $fields, |
| }) async { |
| final queryParams_ = <core.String, core.List<core.String>>{ |
| 'pageSize': ?pageSize == null ? null : ['${pageSize}'], |
| 'pageToken': ?pageToken == null ? null : [pageToken], |
| 'fields': ?$fields == null ? null : [$fields], |
| }; |
| |
| final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/objects'; |
| |
| final response_ = await _requester.request( |
| url_, |
| 'GET', |
| queryParams: queryParams_, |
| ); |
| return ListStreamObjectsResponse.fromJson( |
| response_ as core.Map<core.String, core.dynamic>, |
| ); |
| } |
| |
| /// Use this method to look up a stream object by its source object |
| /// identifier. |
| /// |
| /// [request] - The metadata request object. |
| /// |
| /// Request parameters: |
| /// |
| /// [parent] - Required. The parent stream that owns the collection of |
| /// objects. |
| /// Value must have pattern |
| /// `^projects/\[^/\]+/locations/\[^/\]+/streams/\[^/\]+$`. |
| /// |
| /// [$fields] - Selector specifying which fields to include in a partial |
| /// response. |
| /// |
| /// Completes with a [StreamObject]. |
| /// |
| /// Completes with a [commons.ApiRequestError] if the API endpoint returned an |
| /// error. |
| /// |
| /// If the used [http.Client] completes with an error when making a REST call, |
| /// this method will complete with the same error. |
| async.Future<StreamObject> lookup( |
| LookupStreamObjectRequest request, |
| core.String parent, { |
| core.String? $fields, |
| }) async { |
| final body_ = convert.json.encode(request); |
| final queryParams_ = <core.String, core.List<core.String>>{ |
| 'fields': ?$fields == null ? null : [$fields], |
| }; |
| |
| final url_ = 'v1/' + core.Uri.encodeFull('$parent') + '/objects:lookup'; |
| |
| final response_ = await _requester.request( |
| url_, |
| 'POST', |
| body: body_, |
| queryParams: queryParams_, |
| ); |
| return StreamObject.fromJson( |
| response_ as core.Map<core.String, core.dynamic>, |
| ); |
| } |
| |
| /// Use this method to start a backfill job for the specified stream object. |
| /// |
| /// [request] - The metadata request object. |
| /// |
| /// Request parameters: |
| /// |
| /// [object] - Required. The name of the stream object resource to start a |
| /// backfill job for. |
| /// Value must have pattern |
| /// `^projects/\[^/\]+/locations/\[^/\]+/streams/\[^/\]+/objects/\[^/\]+$`. |
| /// |
| /// [$fields] - Selector specifying which fields to include in a partial |
| /// response. |
| /// |
| /// Completes with a [StartBackfillJobResponse]. |
| /// |
| /// Completes with a [commons.ApiRequestError] if the API endpoint returned an |
| /// error. |
| /// |
| /// If the used [http.Client] completes with an error when making a REST call, |
| /// this method will complete with the same error. |
| async.Future<StartBackfillJobResponse> startBackfillJob( |
| StartBackfillJobRequest request, |
| core.String object, { |
| core.String? $fields, |
| }) async { |
| final body_ = convert.json.encode(request); |
| final queryParams_ = <core.String, core.List<core.String>>{ |
| 'fields': ?$fields == null ? null : [$fields], |
| }; |
| |
| final url_ = 'v1/' + core.Uri.encodeFull('$object') + ':startBackfillJob'; |
| |
| final response_ = await _requester.request( |
| url_, |
| 'POST', |
| body: body_, |
| queryParams: queryParams_, |
| ); |
| return StartBackfillJobResponse.fromJson( |
| response_ as core.Map<core.String, core.dynamic>, |
| ); |
| } |
| |
| /// Use this method to stop a backfill job for the specified stream object. |
| /// |
| /// [request] - The metadata request object. |
| /// |
| /// Request parameters: |
| /// |
| /// [object] - Required. The name of the stream object resource to stop the |
| /// backfill job for. |
| /// Value must have pattern |
| /// `^projects/\[^/\]+/locations/\[^/\]+/streams/\[^/\]+/objects/\[^/\]+$`. |
| /// |
| /// [$fields] - Selector specifying which fields to include in a partial |
| /// response. |
| /// |
| /// Completes with a [StopBackfillJobResponse]. |
| /// |
| /// Completes with a [commons.ApiRequestError] if the API endpoint returned an |
| /// error. |
| /// |
| /// If the used [http.Client] completes with an error when making a REST call, |
| /// this method will complete with the same error. |
| async.Future<StopBackfillJobResponse> stopBackfillJob( |
| StopBackfillJobRequest request, |
| core.String object, { |
| core.String? $fields, |
| }) async { |
| final body_ = convert.json.encode(request); |
| final queryParams_ = <core.String, core.List<core.String>>{ |
| 'fields': ?$fields == null ? null : [$fields], |
| }; |
| |
| final url_ = 'v1/' + core.Uri.encodeFull('$object') + ':stopBackfillJob'; |
| |
| final response_ = await _requester.request( |
| url_, |
| 'POST', |
| body: body_, |
| queryParams: queryParams_, |
| ); |
| return StopBackfillJobResponse.fromJson( |
| response_ as core.Map<core.String, core.dynamic>, |
| ); |
| } |
| } |
| |
/// AppendOnly mode defines that all changes to a table will be written to the
/// destination table.
///
/// Alias for the shared empty message; this mode carries no configuration
/// fields of its own.
typedef AppendOnly = $Empty;
| |
/// AVRO file format configuration.
///
/// Alias for the shared empty message; the AVRO format takes no options.
typedef AvroFileFormat = $Empty;
| |
| /// Backfill strategy to automatically backfill the Stream's objects. |
| /// |
| /// Specific objects can be excluded. |
class BackfillAllStrategy {
  /// MongoDB data source objects to avoid backfilling
  MongodbCluster? mongodbExcludedObjects;

  /// MySQL data source objects to avoid backfilling.
  MysqlRdbms? mysqlExcludedObjects;

  /// Oracle data source objects to avoid backfilling.
  OracleRdbms? oracleExcludedObjects;

  /// PostgreSQL data source objects to avoid backfilling.
  PostgresqlRdbms? postgresqlExcludedObjects;

  /// Salesforce data source objects to avoid backfilling
  SalesforceOrg? salesforceExcludedObjects;

  /// Spanner data source objects to avoid backfilling.
  SpannerDatabase? spannerExcludedObjects;

  /// SQLServer data source objects to avoid backfilling
  SqlServerRdbms? sqlServerExcludedObjects;

  BackfillAllStrategy({
    this.mongodbExcludedObjects,
    this.mysqlExcludedObjects,
    this.oracleExcludedObjects,
    this.postgresqlExcludedObjects,
    this.salesforceExcludedObjects,
    this.spannerExcludedObjects,
    this.sqlServerExcludedObjects,
  });

  /// Deserializes from the wire format; absent keys become null fields.
  BackfillAllStrategy.fromJson(core.Map json_)
      : this(
          mongodbExcludedObjects: !json_.containsKey('mongodbExcludedObjects')
              ? null
              : MongodbCluster.fromJson(
                  json_['mongodbExcludedObjects']
                      as core.Map<core.String, core.dynamic>,
                ),
          mysqlExcludedObjects: !json_.containsKey('mysqlExcludedObjects')
              ? null
              : MysqlRdbms.fromJson(
                  json_['mysqlExcludedObjects']
                      as core.Map<core.String, core.dynamic>,
                ),
          oracleExcludedObjects: !json_.containsKey('oracleExcludedObjects')
              ? null
              : OracleRdbms.fromJson(
                  json_['oracleExcludedObjects']
                      as core.Map<core.String, core.dynamic>,
                ),
          postgresqlExcludedObjects:
              !json_.containsKey('postgresqlExcludedObjects')
                  ? null
                  : PostgresqlRdbms.fromJson(
                      json_['postgresqlExcludedObjects']
                          as core.Map<core.String, core.dynamic>,
                    ),
          salesforceExcludedObjects:
              !json_.containsKey('salesforceExcludedObjects')
                  ? null
                  : SalesforceOrg.fromJson(
                      json_['salesforceExcludedObjects']
                          as core.Map<core.String, core.dynamic>,
                    ),
          spannerExcludedObjects: !json_.containsKey('spannerExcludedObjects')
              ? null
              : SpannerDatabase.fromJson(
                  json_['spannerExcludedObjects']
                      as core.Map<core.String, core.dynamic>,
                ),
          sqlServerExcludedObjects:
              !json_.containsKey('sqlServerExcludedObjects')
                  ? null
                  : SqlServerRdbms.fromJson(
                      json_['sqlServerExcludedObjects']
                          as core.Map<core.String, core.dynamic>,
                    ),
        );

  /// Serializes to the wire format; null fields are omitted.
  core.Map<core.String, core.dynamic> toJson() => {
        if (mongodbExcludedObjects != null)
          'mongodbExcludedObjects': mongodbExcludedObjects,
        if (mysqlExcludedObjects != null)
          'mysqlExcludedObjects': mysqlExcludedObjects,
        if (oracleExcludedObjects != null)
          'oracleExcludedObjects': oracleExcludedObjects,
        if (postgresqlExcludedObjects != null)
          'postgresqlExcludedObjects': postgresqlExcludedObjects,
        if (salesforceExcludedObjects != null)
          'salesforceExcludedObjects': salesforceExcludedObjects,
        if (spannerExcludedObjects != null)
          'spannerExcludedObjects': spannerExcludedObjects,
        if (sqlServerExcludedObjects != null)
          'sqlServerExcludedObjects': sqlServerExcludedObjects,
      };
}
| |
| /// Represents a backfill job on a specific stream object. |
class BackfillJob {
  /// Errors which caused the backfill job to fail.
  ///
  /// Output only.
  core.List<Error>? errors;

  /// Backfill job's end time.
  ///
  /// Output only.
  core.String? lastEndTime;

  /// Backfill job's start time.
  ///
  /// Output only.
  core.String? lastStartTime;

  /// Backfill job state.
  ///
  /// Output only.
  /// Possible string values are:
  /// - "STATE_UNSPECIFIED" : Default value.
  /// - "NOT_STARTED" : Backfill job was never started for the stream object
  /// (stream has backfill strategy defined as manual or object was explicitly
  /// excluded from automatic backfill).
  /// - "PENDING" : Backfill job will start pending available resources.
  /// - "ACTIVE" : Backfill job is running.
  /// - "STOPPED" : Backfill job stopped (next job run will start from
  /// beginning).
  /// - "FAILED" : Backfill job failed (due to an error).
  /// - "COMPLETED" : Backfill completed successfully.
  /// - "UNSUPPORTED" : Backfill job failed since the table structure is
  /// currently unsupported for backfill.
  core.String? state;

  /// Backfill job's triggering reason.
  /// Possible string values are:
  /// - "TRIGGER_UNSPECIFIED" : Default value.
  /// - "AUTOMATIC" : Object backfill job was triggered automatically according
  /// to the stream's backfill strategy.
  /// - "MANUAL" : Object backfill job was triggered manually using the
  /// dedicated API.
  core.String? trigger;

  BackfillJob({
    this.errors,
    this.lastEndTime,
    this.lastStartTime,
    this.state,
    this.trigger,
  });

  /// Deserializes from the wire format; absent keys become null fields.
  BackfillJob.fromJson(core.Map json_)
      : this(
          errors: json_['errors'] == null
              ? null
              : [
                  for (final item in json_['errors'] as core.List)
                    Error.fromJson(
                      item as core.Map<core.String, core.dynamic>,
                    ),
                ],
          lastEndTime: json_['lastEndTime'] as core.String?,
          lastStartTime: json_['lastStartTime'] as core.String?,
          state: json_['state'] as core.String?,
          trigger: json_['trigger'] as core.String?,
        );

  /// Serializes to the wire format; null fields are omitted.
  core.Map<core.String, core.dynamic> toJson() => {
        if (errors != null) 'errors': errors,
        if (lastEndTime != null) 'lastEndTime': lastEndTime,
        if (lastStartTime != null) 'lastStartTime': lastStartTime,
        if (state != null) 'state': state,
        if (trigger != null) 'trigger': trigger,
      };
}
| |
/// Backfill strategy to disable automatic backfill for the Stream's objects.
///
/// Alias for the shared empty message; this strategy has no parameters.
typedef BackfillNoneStrategy = $Empty;
| |
/// Message to represent the option where Datastream will enforce encryption
/// without authenticating server identity.
///
/// Server certificates will be trusted by default.
///
/// Alias for the shared empty message; this option has no parameters.
typedef BasicEncryption = $Empty;
| |
| /// BigQuery clustering configuration. |
class BigQueryClustering {
  /// Column names to set as clustering columns.
  ///
  /// Required.
  core.List<core.String>? columns;

  BigQueryClustering({this.columns});

  /// Deserializes from the wire format; an absent key becomes a null field.
  BigQueryClustering.fromJson(core.Map json_)
      : this(
          columns: json_['columns'] == null
              ? null
              : [
                  for (final column in json_['columns'] as core.List)
                    column as core.String,
                ],
        );

  /// Serializes to the wire format; a null field is omitted.
  core.Map<core.String, core.dynamic> toJson() =>
      {if (columns != null) 'columns': columns};
}
| |
| /// BigQuery destination configuration |
class BigQueryDestinationConfig {
  /// Append only mode
  AppendOnly? appendOnly;

  /// Big Lake Managed Tables (BLMT) configuration.
  ///
  /// Optional.
  BlmtConfig? blmtConfig;

  /// The guaranteed data freshness (in seconds) when querying tables created by
  /// the stream.
  ///
  /// Editing this field will only affect new tables created in the future, but
  /// existing tables will not be impacted. Lower values mean that queries will
  /// return fresher data, but may result in higher cost.
  core.String? dataFreshness;

  /// The standard mode
  Merge? merge;

  /// Single destination dataset.
  SingleTargetDataset? singleTargetDataset;

  /// Source hierarchy datasets.
  SourceHierarchyDatasets? sourceHierarchyDatasets;

  BigQueryDestinationConfig({
    this.appendOnly,
    this.blmtConfig,
    this.dataFreshness,
    this.merge,
    this.singleTargetDataset,
    this.sourceHierarchyDatasets,
  });

  /// Deserializes from the wire format; absent keys become null fields.
  BigQueryDestinationConfig.fromJson(core.Map json_)
      : this(
          appendOnly: !json_.containsKey('appendOnly')
              ? null
              : AppendOnly.fromJson(
                  json_['appendOnly'] as core.Map<core.String, core.dynamic>,
                ),
          blmtConfig: !json_.containsKey('blmtConfig')
              ? null
              : BlmtConfig.fromJson(
                  json_['blmtConfig'] as core.Map<core.String, core.dynamic>,
                ),
          dataFreshness: json_['dataFreshness'] as core.String?,
          merge: !json_.containsKey('merge')
              ? null
              : Merge.fromJson(
                  json_['merge'] as core.Map<core.String, core.dynamic>,
                ),
          singleTargetDataset: !json_.containsKey('singleTargetDataset')
              ? null
              : SingleTargetDataset.fromJson(
                  json_['singleTargetDataset']
                      as core.Map<core.String, core.dynamic>,
                ),
          sourceHierarchyDatasets:
              !json_.containsKey('sourceHierarchyDatasets')
                  ? null
                  : SourceHierarchyDatasets.fromJson(
                      json_['sourceHierarchyDatasets']
                          as core.Map<core.String, core.dynamic>,
                    ),
        );

  /// Serializes to the wire format; null fields are omitted.
  core.Map<core.String, core.dynamic> toJson() => {
        if (appendOnly != null) 'appendOnly': appendOnly,
        if (blmtConfig != null) 'blmtConfig': blmtConfig,
        if (dataFreshness != null) 'dataFreshness': dataFreshness,
        if (merge != null) 'merge': merge,
        if (singleTargetDataset != null)
          'singleTargetDataset': singleTargetDataset,
        if (sourceHierarchyDatasets != null)
          'sourceHierarchyDatasets': sourceHierarchyDatasets,
      };
}
| |
| /// BigQuery partitioning configuration. |
class BigQueryPartitioning {
  /// Ingestion time partitioning.
  IngestionTimePartition? ingestionTimePartition;

  /// Integer range partitioning.
  IntegerRangePartition? integerRangePartition;

  /// If true, queries over the table require a partition filter.
  ///
  /// Optional.
  core.bool? requirePartitionFilter;

  /// Time unit column partitioning.
  TimeUnitPartition? timeUnitPartition;

  BigQueryPartitioning({
    this.ingestionTimePartition,
    this.integerRangePartition,
    this.requirePartitionFilter,
    this.timeUnitPartition,
  });

  /// Deserializes from the wire format; absent keys become null fields.
  BigQueryPartitioning.fromJson(core.Map json_)
      : this(
          ingestionTimePartition: !json_.containsKey('ingestionTimePartition')
              ? null
              : IngestionTimePartition.fromJson(
                  json_['ingestionTimePartition']
                      as core.Map<core.String, core.dynamic>,
                ),
          integerRangePartition: !json_.containsKey('integerRangePartition')
              ? null
              : IntegerRangePartition.fromJson(
                  json_['integerRangePartition']
                      as core.Map<core.String, core.dynamic>,
                ),
          requirePartitionFilter: json_['requirePartitionFilter'] as core.bool?,
          timeUnitPartition: !json_.containsKey('timeUnitPartition')
              ? null
              : TimeUnitPartition.fromJson(
                  json_['timeUnitPartition']
                      as core.Map<core.String, core.dynamic>,
                ),
        );

  /// Serializes to the wire format; null fields are omitted.
  core.Map<core.String, core.dynamic> toJson() => {
        if (ingestionTimePartition != null)
          'ingestionTimePartition': ingestionTimePartition,
        if (integerRangePartition != null)
          'integerRangePartition': integerRangePartition,
        if (requirePartitionFilter != null)
          'requirePartitionFilter': requirePartitionFilter,
        if (timeUnitPartition != null) 'timeUnitPartition': timeUnitPartition,
      };
}
| |
/// Profile for connecting to a BigQuery destination.
///
/// Alias for the shared empty message; no connection settings are carried.
typedef BigQueryProfile = $Empty;
| |
| /// Configuration to use Binary Log Parser CDC technique. |
class BinaryLogParser {
  /// Use Oracle directories.
  LogFileDirectories? logFileDirectories;

  /// Use Oracle ASM.
  OracleAsmLogFileAccess? oracleAsmLogFileAccess;

  BinaryLogParser({this.logFileDirectories, this.oracleAsmLogFileAccess});

  /// Deserializes from the wire format; absent keys become null fields.
  BinaryLogParser.fromJson(core.Map json_)
      : this(
          logFileDirectories: !json_.containsKey('logFileDirectories')
              ? null
              : LogFileDirectories.fromJson(
                  json_['logFileDirectories']
                      as core.Map<core.String, core.dynamic>,
                ),
          oracleAsmLogFileAccess: !json_.containsKey('oracleAsmLogFileAccess')
              ? null
              : OracleAsmLogFileAccess.fromJson(
                  json_['oracleAsmLogFileAccess']
                      as core.Map<core.String, core.dynamic>,
                ),
        );

  /// Serializes to the wire format; null fields are omitted.
  core.Map<core.String, core.dynamic> toJson() => {
        if (logFileDirectories != null)
          'logFileDirectories': logFileDirectories,
        if (oracleAsmLogFileAccess != null)
          'oracleAsmLogFileAccess': oracleAsmLogFileAccess,
      };
}
| |
/// Use Binary log position based replication.
///
/// Alias for the shared empty message; this CDC method has no parameters.
typedef BinaryLogPosition = $Empty;
| |
| /// The configuration for BLMT. |
class BlmtConfig {
  /// The Cloud Storage bucket name.
  ///
  /// Required.
  core.String? bucket;

  /// The bigquery connection.
  ///
  /// Format: `{project}.{location}.{name}`
  ///
  /// Required.
  core.String? connectionName;

  /// The file format.
  ///
  /// Required.
  /// Possible string values are:
  /// - "FILE_FORMAT_UNSPECIFIED" : Default value.
  /// - "PARQUET" : Parquet file format.
  core.String? fileFormat;

  /// The root path inside the Cloud Storage bucket.
  core.String? rootPath;

  /// The table format.
  ///
  /// Required.
  /// Possible string values are:
  /// - "TABLE_FORMAT_UNSPECIFIED" : Default value.
  /// - "ICEBERG" : Iceberg table format.
  core.String? tableFormat;

  BlmtConfig({
    this.bucket,
    this.connectionName,
    this.fileFormat,
    this.rootPath,
    this.tableFormat,
  });

  /// Deserializes from the wire format; absent keys become null fields.
  BlmtConfig.fromJson(core.Map json_)
      : this(
          bucket: json_['bucket'] as core.String?,
          connectionName: json_['connectionName'] as core.String?,
          fileFormat: json_['fileFormat'] as core.String?,
          rootPath: json_['rootPath'] as core.String?,
          tableFormat: json_['tableFormat'] as core.String?,
        );

  /// Serializes to the wire format; null fields are omitted.
  core.Map<core.String, core.dynamic> toJson() => {
        if (bucket != null) 'bucket': bucket,
        if (connectionName != null) 'connectionName': connectionName,
        if (fileFormat != null) 'fileFormat': fileFormat,
        if (rootPath != null) 'rootPath': rootPath,
        if (tableFormat != null) 'tableFormat': tableFormat,
      };
}
| |
/// The request message for Operations.CancelOperation.
///
/// Alias for the shared empty message; the request body carries no fields.
typedef CancelOperationRequest = $Empty;
| |
| /// The strategy that the stream uses for CDC replication. |
class CdcStrategy {
  /// Start replicating from the most recent position in the source.
  ///
  /// Optional.
  MostRecentStartPosition? mostRecentStartPosition;

  /// Resume replication from the next available position in the source.
  ///
  /// Optional.
  NextAvailableStartPosition? nextAvailableStartPosition;

  /// Start replicating from a specific position in the source.
  ///
  /// Optional.
  SpecificStartPosition? specificStartPosition;

  CdcStrategy({
    this.mostRecentStartPosition,
    this.nextAvailableStartPosition,
    this.specificStartPosition,
  });

  /// Deserializes from the wire format; absent keys become null fields.
  CdcStrategy.fromJson(core.Map json_)
      : this(
          mostRecentStartPosition: !json_.containsKey('mostRecentStartPosition')
              ? null
              : MostRecentStartPosition.fromJson(
                  json_['mostRecentStartPosition']
                      as core.Map<core.String, core.dynamic>,
                ),
          nextAvailableStartPosition:
              !json_.containsKey('nextAvailableStartPosition')
                  ? null
                  : NextAvailableStartPosition.fromJson(
                      json_['nextAvailableStartPosition']
                          as core.Map<core.String, core.dynamic>,
                    ),
          specificStartPosition: !json_.containsKey('specificStartPosition')
              ? null
              : SpecificStartPosition.fromJson(
                  json_['specificStartPosition']
                      as core.Map<core.String, core.dynamic>,
                ),
        );

  /// Serializes to the wire format; null fields are omitted.
  core.Map<core.String, core.dynamic> toJson() => {
        if (mostRecentStartPosition != null)
          'mostRecentStartPosition': mostRecentStartPosition,
        if (nextAvailableStartPosition != null)
          'nextAvailableStartPosition': nextAvailableStartPosition,
        if (specificStartPosition != null)
          'specificStartPosition': specificStartPosition,
      };
}
| |
| /// A set of reusable connection configurations to be used as a source or |
| /// destination for a stream. |
class ConnectionProfile {
  /// Profile for connecting to a BigQuery destination.
  BigQueryProfile? bigqueryProfile;

  /// The create time of the resource.
  ///
  /// Output only.
  core.String? createTime;

  /// Display name.
  ///
  /// Required.
  core.String? displayName;

  /// Forward SSH tunnel connectivity.
  ForwardSshTunnelConnectivity? forwardSshConnectivity;

  /// Profile for connecting to a Cloud Storage destination.
  GcsProfile? gcsProfile;

  /// Labels.
  core.Map<core.String, core.String>? labels;

  /// Profile for connecting to a MongoDB source.
  MongodbProfile? mongodbProfile;

  /// Profile for connecting to a MySQL source.
  MysqlProfile? mysqlProfile;

  /// Identifier.
  ///
  /// The resource's name.
  ///
  /// Output only.
  core.String? name;

  /// Profile for connecting to an Oracle source.
  OracleProfile? oracleProfile;

  /// Profile for connecting to a PostgreSQL source.
  PostgresqlProfile? postgresqlProfile;

  /// Private connectivity.
  PrivateConnectivity? privateConnectivity;

  /// Profile for connecting to a Salesforce source.
  SalesforceProfile? salesforceProfile;

  /// Reserved for future use.
  ///
  /// Output only.
  core.bool? satisfiesPzi;

  /// Reserved for future use.
  ///
  /// Output only.
  core.bool? satisfiesPzs;

  /// Profile for connecting to a Spanner source.
  SpannerProfile? spannerProfile;

  /// Profile for connecting to a SQLServer source.
  SqlServerProfile? sqlServerProfile;

  /// Static Service IP connectivity.
  StaticServiceIpConnectivity? staticServiceIpConnectivity;

  /// The update time of the resource.
  ///
  /// Output only.
  core.String? updateTime;

  ConnectionProfile({
    this.bigqueryProfile,
    this.createTime,
    this.displayName,
    this.forwardSshConnectivity,
    this.gcsProfile,
    this.labels,
    this.mongodbProfile,
    this.mysqlProfile,
    this.name,
    this.oracleProfile,
    this.postgresqlProfile,
    this.privateConnectivity,
    this.salesforceProfile,
    this.satisfiesPzi,
    this.satisfiesPzs,
    this.spannerProfile,
    this.sqlServerProfile,
    this.staticServiceIpConnectivity,
    this.updateTime,
  });

  /// Deserializes from the wire format; absent keys become null fields.
  ConnectionProfile.fromJson(core.Map json_)
      : this(
          bigqueryProfile: !json_.containsKey('bigqueryProfile')
              ? null
              : BigQueryProfile.fromJson(
                  json_['bigqueryProfile']
                      as core.Map<core.String, core.dynamic>,
                ),
          createTime: json_['createTime'] as core.String?,
          displayName: json_['displayName'] as core.String?,
          forwardSshConnectivity: !json_.containsKey('forwardSshConnectivity')
              ? null
              : ForwardSshTunnelConnectivity.fromJson(
                  json_['forwardSshConnectivity']
                      as core.Map<core.String, core.dynamic>,
                ),
          gcsProfile: !json_.containsKey('gcsProfile')
              ? null
              : GcsProfile.fromJson(
                  json_['gcsProfile'] as core.Map<core.String, core.dynamic>,
                ),
          labels: (json_['labels'] as core.Map<core.String, core.dynamic>?)
              ?.map((k, v) => core.MapEntry(k, v as core.String)),
          mongodbProfile: !json_.containsKey('mongodbProfile')
              ? null
              : MongodbProfile.fromJson(
                  json_['mongodbProfile']
                      as core.Map<core.String, core.dynamic>,
                ),
          mysqlProfile: !json_.containsKey('mysqlProfile')
              ? null
              : MysqlProfile.fromJson(
                  json_['mysqlProfile'] as core.Map<core.String, core.dynamic>,
                ),
          name: json_['name'] as core.String?,
          oracleProfile: !json_.containsKey('oracleProfile')
              ? null
              : OracleProfile.fromJson(
                  json_['oracleProfile']
                      as core.Map<core.String, core.dynamic>,
                ),
          postgresqlProfile: !json_.containsKey('postgresqlProfile')
              ? null
              : PostgresqlProfile.fromJson(
                  json_['postgresqlProfile']
                      as core.Map<core.String, core.dynamic>,
                ),
          privateConnectivity: !json_.containsKey('privateConnectivity')
              ? null
              : PrivateConnectivity.fromJson(
                  json_['privateConnectivity']
                      as core.Map<core.String, core.dynamic>,
                ),
          salesforceProfile: !json_.containsKey('salesforceProfile')
              ? null
              : SalesforceProfile.fromJson(
                  json_['salesforceProfile']
                      as core.Map<core.String, core.dynamic>,
                ),
          satisfiesPzi: json_['satisfiesPzi'] as core.bool?,
          satisfiesPzs: json_['satisfiesPzs'] as core.bool?,
          spannerProfile: !json_.containsKey('spannerProfile')
              ? null
              : SpannerProfile.fromJson(
                  json_['spannerProfile']
                      as core.Map<core.String, core.dynamic>,
                ),
          sqlServerProfile: !json_.containsKey('sqlServerProfile')
              ? null
              : SqlServerProfile.fromJson(
                  json_['sqlServerProfile']
                      as core.Map<core.String, core.dynamic>,
                ),
          staticServiceIpConnectivity:
              !json_.containsKey('staticServiceIpConnectivity')
                  ? null
                  : StaticServiceIpConnectivity.fromJson(
                      json_['staticServiceIpConnectivity']
                          as core.Map<core.String, core.dynamic>,
                    ),
          updateTime: json_['updateTime'] as core.String?,
        );

  /// Serializes to the wire format; null fields are omitted.
  core.Map<core.String, core.dynamic> toJson() => {
        if (bigqueryProfile != null) 'bigqueryProfile': bigqueryProfile,
        if (createTime != null) 'createTime': createTime,
        if (displayName != null) 'displayName': displayName,
        if (forwardSshConnectivity != null)
          'forwardSshConnectivity': forwardSshConnectivity,
        if (gcsProfile != null) 'gcsProfile': gcsProfile,
        if (labels != null) 'labels': labels,
        if (mongodbProfile != null) 'mongodbProfile': mongodbProfile,
        if (mysqlProfile != null) 'mysqlProfile': mysqlProfile,
        if (name != null) 'name': name,
        if (oracleProfile != null) 'oracleProfile': oracleProfile,
        if (postgresqlProfile != null) 'postgresqlProfile': postgresqlProfile,
        if (privateConnectivity != null)
          'privateConnectivity': privateConnectivity,
        if (salesforceProfile != null) 'salesforceProfile': salesforceProfile,
        if (satisfiesPzi != null) 'satisfiesPzi': satisfiesPzi,
        if (satisfiesPzs != null) 'satisfiesPzs': satisfiesPzs,
        if (spannerProfile != null) 'spannerProfile': spannerProfile,
        if (sqlServerProfile != null) 'sqlServerProfile': sqlServerProfile,
        if (staticServiceIpConnectivity != null)
          'staticServiceIpConnectivity': staticServiceIpConnectivity,
        if (updateTime != null) 'updateTime': updateTime,
      };
}
| |
| /// A customization rule to apply to a set of objects. |
class CustomizationRule {
  /// BigQuery clustering rule.
  BigQueryClustering? bigqueryClustering;

  /// BigQuery partitioning rule.
  BigQueryPartitioning? bigqueryPartitioning;

  CustomizationRule({this.bigqueryClustering, this.bigqueryPartitioning});

  /// Deserializes from the wire format; absent keys become null fields.
  CustomizationRule.fromJson(core.Map json_)
      : this(
          bigqueryClustering: !json_.containsKey('bigqueryClustering')
              ? null
              : BigQueryClustering.fromJson(
                  json_['bigqueryClustering']
                      as core.Map<core.String, core.dynamic>,
                ),
          bigqueryPartitioning: !json_.containsKey('bigqueryPartitioning')
              ? null
              : BigQueryPartitioning.fromJson(
                  json_['bigqueryPartitioning']
                      as core.Map<core.String, core.dynamic>,
                ),
        );

  /// Serializes to the wire format; null fields are omitted.
  core.Map<core.String, core.dynamic> toJson() => {
        if (bigqueryClustering != null)
          'bigqueryClustering': bigqueryClustering,
        if (bigqueryPartitioning != null)
          'bigqueryPartitioning': bigqueryPartitioning,
      };
}
| |
| /// Dataset template used for dynamic dataset creation. |
class DatasetTemplate {
  /// If supplied, every created dataset will have its name prefixed by the
  /// provided value.
  ///
  /// The prefix and name will be separated by an underscore. i.e. _.
  core.String? datasetIdPrefix;

  /// Describes the Cloud KMS encryption key that will be used to protect
  /// destination BigQuery table.
  ///
  /// The BigQuery Service Account associated with your project requires access
  /// to this encryption key. i.e.
  /// projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{cryptoKey}.
  /// See https://cloud.google.com/bigquery/docs/customer-managed-encryption for
  /// more information.
  core.String? kmsKeyName;

  /// The geographic location where the dataset should reside.
  ///
  /// See https://cloud.google.com/bigquery/docs/locations for supported
  /// locations.
  ///
  /// Required.
  core.String? location;

  DatasetTemplate({this.datasetIdPrefix, this.kmsKeyName, this.location});

  /// Deserializes from the wire format; absent keys become null fields.
  DatasetTemplate.fromJson(core.Map json_)
      : this(
          datasetIdPrefix: json_['datasetIdPrefix'] as core.String?,
          kmsKeyName: json_['kmsKeyName'] as core.String?,
          location: json_['location'] as core.String?,
        );

  /// Serializes to the wire format; null fields are omitted.
  core.Map<core.String, core.dynamic> toJson() => {
        if (datasetIdPrefix != null) 'datasetIdPrefix': datasetIdPrefix,
        if (kmsKeyName != null) 'kmsKeyName': kmsKeyName,
        if (location != null) 'location': location,
      };
}
| |
| /// The configuration of the stream destination. |
class DestinationConfig {
  /// BigQuery destination configuration.
  BigQueryDestinationConfig? bigqueryDestinationConfig;

  /// Destination connection profile resource.
  ///
  /// Format:
  /// `projects/{project}/locations/{location}/connectionProfiles/{name}`
  ///
  /// Required.
  core.String? destinationConnectionProfile;

  /// A configuration for how data should be loaded to Cloud Storage.
  GcsDestinationConfig? gcsDestinationConfig;

  DestinationConfig({
    this.bigqueryDestinationConfig,
    this.destinationConnectionProfile,
    this.gcsDestinationConfig,
  });

  /// Deserializes from the wire format; absent keys become null fields.
  DestinationConfig.fromJson(core.Map json_)
      : this(
          bigqueryDestinationConfig:
              !json_.containsKey('bigqueryDestinationConfig')
                  ? null
                  : BigQueryDestinationConfig.fromJson(
                      json_['bigqueryDestinationConfig']
                          as core.Map<core.String, core.dynamic>,
                    ),
          destinationConnectionProfile:
              json_['destinationConnectionProfile'] as core.String?,
          gcsDestinationConfig: !json_.containsKey('gcsDestinationConfig')
              ? null
              : GcsDestinationConfig.fromJson(
                  json_['gcsDestinationConfig']
                      as core.Map<core.String, core.dynamic>,
                ),
        );

  /// Serializes to the wire format; null fields are omitted.
  core.Map<core.String, core.dynamic> toJson() => {
        if (bigqueryDestinationConfig != null)
          'bigqueryDestinationConfig': bigqueryDestinationConfig,
        if (destinationConnectionProfile != null)
          'destinationConnectionProfile': destinationConnectionProfile,
        if (gcsDestinationConfig != null)
          'gcsDestinationConfig': gcsDestinationConfig,
      };
}
| |
/// Request message for 'discover' ConnectionProfile request.
class DiscoverConnectionProfileRequest {
  /// An ad-hoc connection profile configuration.
  ///
  /// Optional.
  ConnectionProfile? connectionProfile;

  /// A reference to an existing connection profile.
  ///
  /// Optional.
  core.String? connectionProfileName;

  /// Whether to retrieve the full hierarchy of data objects (TRUE) or only the
  /// current level (FALSE).
  ///
  /// Optional.
  core.bool? fullHierarchy;

  /// The number of hierarchy levels below the current level to be retrieved.
  ///
  /// Optional.
  core.int? hierarchyDepth;

  /// MongoDB cluster to enrich with child data objects and metadata.
  ///
  /// Optional.
  MongodbCluster? mongodbCluster;

  /// MySQL RDBMS to enrich with child data objects and metadata.
  ///
  /// Optional.
  MysqlRdbms? mysqlRdbms;

  /// Oracle RDBMS to enrich with child data objects and metadata.
  ///
  /// Optional.
  OracleRdbms? oracleRdbms;

  /// PostgreSQL RDBMS to enrich with child data objects and metadata.
  ///
  /// Optional.
  PostgresqlRdbms? postgresqlRdbms;

  /// Salesforce organization to enrich with child data objects and metadata.
  ///
  /// Optional.
  SalesforceOrg? salesforceOrg;

  /// Spanner database to enrich with child data objects and metadata.
  ///
  /// Optional.
  SpannerDatabase? spannerDatabase;

  /// SQLServer RDBMS to enrich with child data objects and metadata.
  ///
  /// Optional.
  SqlServerRdbms? sqlServerRdbms;

  DiscoverConnectionProfileRequest(
      {this.connectionProfile,
      this.connectionProfileName,
      this.fullHierarchy,
      this.hierarchyDepth,
      this.mongodbCluster,
      this.mysqlRdbms,
      this.oracleRdbms,
      this.postgresqlRdbms,
      this.salesforceOrg,
      this.spannerDatabase,
      this.sqlServerRdbms});

  /// Builds a [DiscoverConnectionProfileRequest] from decoded JSON.
  DiscoverConnectionProfileRequest.fromJson(core.Map json)
      : this(
          connectionProfile: !json.containsKey('connectionProfile')
              ? null
              : ConnectionProfile.fromJson(
                  json['connectionProfile']
                      as core.Map<core.String, core.dynamic>,
                ),
          connectionProfileName: json['connectionProfileName'] as core.String?,
          fullHierarchy: json['fullHierarchy'] as core.bool?,
          hierarchyDepth: json['hierarchyDepth'] as core.int?,
          mongodbCluster: !json.containsKey('mongodbCluster')
              ? null
              : MongodbCluster.fromJson(
                  json['mongodbCluster'] as core.Map<core.String, core.dynamic>,
                ),
          mysqlRdbms: !json.containsKey('mysqlRdbms')
              ? null
              : MysqlRdbms.fromJson(
                  json['mysqlRdbms'] as core.Map<core.String, core.dynamic>,
                ),
          oracleRdbms: !json.containsKey('oracleRdbms')
              ? null
              : OracleRdbms.fromJson(
                  json['oracleRdbms'] as core.Map<core.String, core.dynamic>,
                ),
          postgresqlRdbms: !json.containsKey('postgresqlRdbms')
              ? null
              : PostgresqlRdbms.fromJson(
                  json['postgresqlRdbms']
                      as core.Map<core.String, core.dynamic>,
                ),
          salesforceOrg: !json.containsKey('salesforceOrg')
              ? null
              : SalesforceOrg.fromJson(
                  json['salesforceOrg'] as core.Map<core.String, core.dynamic>,
                ),
          spannerDatabase: !json.containsKey('spannerDatabase')
              ? null
              : SpannerDatabase.fromJson(
                  json['spannerDatabase']
                      as core.Map<core.String, core.dynamic>,
                ),
          sqlServerRdbms: !json.containsKey('sqlServerRdbms')
              ? null
              : SqlServerRdbms.fromJson(
                  json['sqlServerRdbms']
                      as core.Map<core.String, core.dynamic>,
                ),
        );

  /// Serializes this request to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
        if (connectionProfile != null) 'connectionProfile': connectionProfile,
        if (connectionProfileName != null)
          'connectionProfileName': connectionProfileName,
        if (fullHierarchy != null) 'fullHierarchy': fullHierarchy,
        if (hierarchyDepth != null) 'hierarchyDepth': hierarchyDepth,
        if (mongodbCluster != null) 'mongodbCluster': mongodbCluster,
        if (mysqlRdbms != null) 'mysqlRdbms': mysqlRdbms,
        if (oracleRdbms != null) 'oracleRdbms': oracleRdbms,
        if (postgresqlRdbms != null) 'postgresqlRdbms': postgresqlRdbms,
        if (salesforceOrg != null) 'salesforceOrg': salesforceOrg,
        if (spannerDatabase != null) 'spannerDatabase': spannerDatabase,
        if (sqlServerRdbms != null) 'sqlServerRdbms': sqlServerRdbms,
      };
}
| |
/// Response from a discover request.
class DiscoverConnectionProfileResponse {
  /// Enriched MongoDB cluster.
  MongodbCluster? mongodbCluster;

  /// Enriched MySQL RDBMS object.
  MysqlRdbms? mysqlRdbms;

  /// Enriched Oracle RDBMS object.
  OracleRdbms? oracleRdbms;

  /// Enriched PostgreSQL RDBMS object.
  PostgresqlRdbms? postgresqlRdbms;

  /// Enriched Salesforce organization.
  SalesforceOrg? salesforceOrg;

  /// Enriched Spanner database.
  SpannerDatabase? spannerDatabase;

  /// Enriched SQLServer RDBMS object.
  SqlServerRdbms? sqlServerRdbms;

  DiscoverConnectionProfileResponse(
      {this.mongodbCluster,
      this.mysqlRdbms,
      this.oracleRdbms,
      this.postgresqlRdbms,
      this.salesforceOrg,
      this.spannerDatabase,
      this.sqlServerRdbms});

  /// Builds a [DiscoverConnectionProfileResponse] from decoded JSON.
  DiscoverConnectionProfileResponse.fromJson(core.Map json)
      : this(
          mongodbCluster: !json.containsKey('mongodbCluster')
              ? null
              : MongodbCluster.fromJson(
                  json['mongodbCluster'] as core.Map<core.String, core.dynamic>,
                ),
          mysqlRdbms: !json.containsKey('mysqlRdbms')
              ? null
              : MysqlRdbms.fromJson(
                  json['mysqlRdbms'] as core.Map<core.String, core.dynamic>,
                ),
          oracleRdbms: !json.containsKey('oracleRdbms')
              ? null
              : OracleRdbms.fromJson(
                  json['oracleRdbms'] as core.Map<core.String, core.dynamic>,
                ),
          postgresqlRdbms: !json.containsKey('postgresqlRdbms')
              ? null
              : PostgresqlRdbms.fromJson(
                  json['postgresqlRdbms']
                      as core.Map<core.String, core.dynamic>,
                ),
          salesforceOrg: !json.containsKey('salesforceOrg')
              ? null
              : SalesforceOrg.fromJson(
                  json['salesforceOrg'] as core.Map<core.String, core.dynamic>,
                ),
          spannerDatabase: !json.containsKey('spannerDatabase')
              ? null
              : SpannerDatabase.fromJson(
                  json['spannerDatabase']
                      as core.Map<core.String, core.dynamic>,
                ),
          sqlServerRdbms: !json.containsKey('sqlServerRdbms')
              ? null
              : SqlServerRdbms.fromJson(
                  json['sqlServerRdbms']
                      as core.Map<core.String, core.dynamic>,
                ),
        );

  /// Serializes this response to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
        if (mongodbCluster != null) 'mongodbCluster': mongodbCluster,
        if (mysqlRdbms != null) 'mysqlRdbms': mysqlRdbms,
        if (oracleRdbms != null) 'oracleRdbms': oracleRdbms,
        if (postgresqlRdbms != null) 'postgresqlRdbms': postgresqlRdbms,
        if (salesforceOrg != null) 'salesforceOrg': salesforceOrg,
        if (spannerDatabase != null) 'spannerDatabase': spannerDatabase,
        if (sqlServerRdbms != null) 'sqlServerRdbms': sqlServerRdbms,
      };
}
| |
/// Configuration to drop large object values.
///
/// Carries no fields of its own; it is an alias for the shared [$Empty]
/// message type.
typedef DropLargeObjects = $Empty;

/// A generic empty message that you can re-use to avoid defining duplicated
/// empty messages in your APIs.
///
/// A typical example is to use it as the request or the response type of an API
/// method. For instance: service Foo { rpc Bar(google.protobuf.Empty) returns
/// (google.protobuf.Empty); }
typedef Empty = $Empty;
| |
/// Message to represent the option where Datastream will enforce encryption and
/// authenticate server identity.
///
/// ca_certificate must be set if user selects this option.
class EncryptionAndServerValidation {
  /// Input only.
  ///
  /// PEM-encoded certificate of the CA that signed the source database server's
  /// certificate.
  ///
  /// Optional.
  core.String? caCertificate;

  /// The hostname mentioned in the Subject or SAN extension of the server
  /// certificate.
  ///
  /// This field is used for bypassing the hostname validation while verifying
  /// server certificate. This is required for scenarios where the host name
  /// that datastream connects to is different from the certificate's subject.
  /// This specifically happens for private connectivity. It could also happen
  /// when the customer provides a public IP in connection profile but the same
  /// is not present in the server certificate.
  ///
  /// Optional.
  core.String? serverCertificateHostname;

  EncryptionAndServerValidation(
      {this.caCertificate, this.serverCertificateHostname});

  /// Builds an [EncryptionAndServerValidation] from decoded JSON.
  EncryptionAndServerValidation.fromJson(core.Map json)
      : this(
          caCertificate: json['caCertificate'] as core.String?,
          serverCertificateHostname:
              json['serverCertificateHostname'] as core.String?,
        );

  /// Serializes this message to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
        if (caCertificate != null) 'caCertificate': caCertificate,
        if (serverCertificateHostname != null)
          'serverCertificateHostname': serverCertificateHostname,
      };
}
| |
/// Message to represent the option where encryption is not enforced.
///
/// An empty message right now to allow future extensibility.
///
/// Alias for the shared [$Empty] message type.
typedef EncryptionNotEnforced = $Empty;
| |
/// Represent a user-facing Error.
class Error {
  /// Additional information about the error.
  core.Map<core.String, core.String>? details;

  /// The time when the error occurred.
  core.String? errorTime;

  /// A unique identifier for this specific error, allowing it to be traced
  /// throughout the system in logs and API responses.
  core.String? errorUuid;

  /// A message containing more information about the error that occurred.
  core.String? message;

  /// A title that explains the reason for the error.
  core.String? reason;

  Error(
      {this.details,
      this.errorTime,
      this.errorUuid,
      this.message,
      this.reason});

  /// Builds an [Error] from decoded JSON.
  Error.fromJson(core.Map json)
      : this(
          details: (json['details'] as core.Map<core.String, core.dynamic>?)
              ?.map((k, v) => core.MapEntry(k, v as core.String)),
          errorTime: json['errorTime'] as core.String?,
          errorUuid: json['errorUuid'] as core.String?,
          message: json['message'] as core.String?,
          reason: json['reason'] as core.String?,
        );

  /// Serializes this error to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
        if (details != null) 'details': details,
        if (errorTime != null) 'errorTime': errorTime,
        if (errorUuid != null) 'errorUuid': errorUuid,
        if (message != null) 'message': message,
        if (reason != null) 'reason': reason,
      };
}
| |
/// Represents a filter for included data on a stream object.
class EventFilter {
  /// An SQL-query Where clause selecting which data should be included, not
  /// including the "WHERE" keyword.
  ///
  /// e.g., `t.key1 = 'value1' AND t.key2 = 'value2'`
  core.String? sqlWhereClause;

  EventFilter({this.sqlWhereClause});

  /// Builds an [EventFilter] from decoded JSON.
  EventFilter.fromJson(core.Map json)
      : this(sqlWhereClause: json['sqlWhereClause'] as core.String?);

  /// Serializes this filter to JSON; the clause is omitted when unset.
  core.Map<core.String, core.dynamic> toJson() => {
        if (sqlWhereClause != null) 'sqlWhereClause': sqlWhereClause,
      };
}
| |
/// Response message for a 'FetchStaticIps' response.
class FetchStaticIpsResponse {
  /// A token that can be sent as `page_token` to retrieve the next page.
  ///
  /// If this field is omitted, there are no subsequent pages.
  core.String? nextPageToken;

  /// list of static ips by account
  core.List<core.String>? staticIps;

  FetchStaticIpsResponse({this.nextPageToken, this.staticIps});

  /// Builds a [FetchStaticIpsResponse] from decoded JSON.
  FetchStaticIpsResponse.fromJson(core.Map json)
      : this(
          nextPageToken: json['nextPageToken'] as core.String?,
          staticIps: (json['staticIps'] as core.List?)
              ?.map((item) => item as core.String)
              .toList(),
        );

  /// Serializes this response to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
        if (nextPageToken != null) 'nextPageToken': nextPageToken,
        if (staticIps != null) 'staticIps': staticIps,
      };
}
| |
/// Forward SSH Tunnel connectivity.
///
/// Alias for the shared [$ForwardSshTunnelConnectivity] message type.
typedef ForwardSshTunnelConnectivity = $ForwardSshTunnelConnectivity;
| |
/// Google Cloud Storage destination configuration
class GcsDestinationConfig {
  /// AVRO file format configuration.
  AvroFileFormat? avroFileFormat;

  /// The maximum duration for which new events are added before a file is
  /// closed and a new file is created.
  ///
  /// Values within the range of 15-60 seconds are allowed.
  core.String? fileRotationInterval;

  /// The maximum file size to be saved in the bucket.
  core.int? fileRotationMb;

  /// JSON file format configuration.
  JsonFileFormat? jsonFileFormat;

  /// Path inside the Cloud Storage bucket to write data to.
  core.String? path;

  GcsDestinationConfig(
      {this.avroFileFormat,
      this.fileRotationInterval,
      this.fileRotationMb,
      this.jsonFileFormat,
      this.path});

  /// Builds a [GcsDestinationConfig] from decoded JSON.
  GcsDestinationConfig.fromJson(core.Map json)
      : this(
          avroFileFormat: !json.containsKey('avroFileFormat')
              ? null
              : AvroFileFormat.fromJson(
                  json['avroFileFormat'] as core.Map<core.String, core.dynamic>,
                ),
          fileRotationInterval: json['fileRotationInterval'] as core.String?,
          fileRotationMb: json['fileRotationMb'] as core.int?,
          jsonFileFormat: !json.containsKey('jsonFileFormat')
              ? null
              : JsonFileFormat.fromJson(
                  json['jsonFileFormat'] as core.Map<core.String, core.dynamic>,
                ),
          path: json['path'] as core.String?,
        );

  /// Serializes this configuration to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
        if (avroFileFormat != null) 'avroFileFormat': avroFileFormat,
        if (fileRotationInterval != null)
          'fileRotationInterval': fileRotationInterval,
        if (fileRotationMb != null) 'fileRotationMb': fileRotationMb,
        if (jsonFileFormat != null) 'jsonFileFormat': jsonFileFormat,
        if (path != null) 'path': path,
      };
}
| |
/// Profile for connecting to a Cloud Storage destination.
class GcsProfile {
  /// The Cloud Storage bucket name.
  ///
  /// Required.
  core.String? bucket;

  /// The root path inside the Cloud Storage bucket.
  ///
  /// Optional.
  core.String? rootPath;

  GcsProfile({this.bucket, this.rootPath});

  /// Builds a [GcsProfile] from decoded JSON.
  GcsProfile.fromJson(core.Map json)
      : this(
          bucket: json['bucket'] as core.String?,
          rootPath: json['rootPath'] as core.String?,
        );

  /// Serializes this profile to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
        if (bucket != null) 'bucket': bucket,
        if (rootPath != null) 'rootPath': rootPath,
      };
}
| |
/// Use GTID based replication.
///
/// Carries no fields; alias for the shared [$Empty] message type.
typedef Gtid = $Empty;
| |
/// A HostAddress represents a transport end point, which is the combination of
/// an IP address or hostname and a port number.
class HostAddress {
  /// Hostname for the connection.
  ///
  /// Required.
  core.String? hostname;

  /// Port for the connection.
  ///
  /// Optional.
  core.int? port;

  HostAddress({this.hostname, this.port});

  /// Builds a [HostAddress] from decoded JSON.
  HostAddress.fromJson(core.Map json)
      : this(
          hostname: json['hostname'] as core.String?,
          port: json['port'] as core.int?,
        );

  /// Serializes this address to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
        if (hostname != null) 'hostname': hostname,
        if (port != null) 'port': port,
      };
}
| |
/// Ingestion time partitioning.
///
/// see https://cloud.google.com/bigquery/docs/partitioned-tables#ingestion_time
class IngestionTimePartition {
  /// Partition granularity
  ///
  /// Optional.
  /// Possible string values are:
  /// - "PARTITIONING_TIME_GRANULARITY_UNSPECIFIED" : Unspecified partitioning
  /// interval.
  /// - "PARTITIONING_TIME_GRANULARITY_HOUR" : Hourly partitioning.
  /// - "PARTITIONING_TIME_GRANULARITY_DAY" : Daily partitioning.
  /// - "PARTITIONING_TIME_GRANULARITY_MONTH" : Monthly partitioning.
  /// - "PARTITIONING_TIME_GRANULARITY_YEAR" : Yearly partitioning.
  core.String? partitioningTimeGranularity;

  IngestionTimePartition({this.partitioningTimeGranularity});

  /// Builds an [IngestionTimePartition] from decoded JSON.
  IngestionTimePartition.fromJson(core.Map json)
      : this(
          partitioningTimeGranularity:
              json['partitioningTimeGranularity'] as core.String?,
        );

  /// Serializes to JSON; the granularity is omitted when unset.
  core.Map<core.String, core.dynamic> toJson() => {
        if (partitioningTimeGranularity != null)
          'partitioningTimeGranularity': partitioningTimeGranularity,
      };
}
| |
/// Integer range partitioning.
///
/// see https://cloud.google.com/bigquery/docs/partitioned-tables#integer_range
class IntegerRangePartition {
  /// The partitioning column.
  ///
  /// Required.
  core.String? column;

  /// The ending value for range partitioning (exclusive).
  ///
  /// Required.
  core.String? end;

  /// The interval of each range within the partition.
  ///
  /// Required.
  core.String? interval;

  /// The starting value for range partitioning (inclusive).
  ///
  /// Required.
  core.String? start;

  IntegerRangePartition({this.column, this.end, this.interval, this.start});

  /// Builds an [IntegerRangePartition] from decoded JSON.
  IntegerRangePartition.fromJson(core.Map json)
      : this(
          column: json['column'] as core.String?,
          end: json['end'] as core.String?,
          interval: json['interval'] as core.String?,
          start: json['start'] as core.String?,
        );

  /// Serializes this partition spec to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
        if (column != null) 'column': column,
        if (end != null) 'end': end,
        if (interval != null) 'interval': interval,
        if (start != null) 'start': start,
      };
}
| |
/// JSON file format configuration.
class JsonFileFormat {
  /// Compression of the loaded JSON file.
  /// Possible string values are:
  /// - "JSON_COMPRESSION_UNSPECIFIED" : Unspecified json file compression.
  /// - "NO_COMPRESSION" : Do not compress JSON file.
  /// - "GZIP" : Gzip compression.
  core.String? compression;

  /// The schema file format along JSON data files.
  /// Possible string values are:
  /// - "SCHEMA_FILE_FORMAT_UNSPECIFIED" : Unspecified schema file format.
  /// - "NO_SCHEMA_FILE" : Do not attach schema file.
  /// - "AVRO_SCHEMA_FILE" : Avro schema format.
  core.String? schemaFileFormat;

  JsonFileFormat({this.compression, this.schemaFileFormat});

  /// Builds a [JsonFileFormat] from decoded JSON.
  JsonFileFormat.fromJson(core.Map json)
      : this(
          compression: json['compression'] as core.String?,
          schemaFileFormat: json['schemaFileFormat'] as core.String?,
        );

  /// Serializes this format config to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
        if (compression != null) 'compression': compression,
        if (schemaFileFormat != null) 'schemaFileFormat': schemaFileFormat,
      };
}
| |
/// Response message for listing connection profiles.
class ListConnectionProfilesResponse {
  /// List of connection profiles.
  core.List<ConnectionProfile>? connectionProfiles;

  /// A token, which can be sent as `page_token` to retrieve the next page.
  ///
  /// If this field is omitted, there are no subsequent pages.
  core.String? nextPageToken;

  /// Locations that could not be reached.
  core.List<core.String>? unreachable;

  ListConnectionProfilesResponse(
      {this.connectionProfiles, this.nextPageToken, this.unreachable});

  /// Builds a [ListConnectionProfilesResponse] from decoded JSON.
  ListConnectionProfilesResponse.fromJson(core.Map json)
      : this(
          connectionProfiles: (json['connectionProfiles'] as core.List?)
              ?.map(
                (item) => ConnectionProfile.fromJson(
                  item as core.Map<core.String, core.dynamic>,
                ),
              )
              .toList(),
          nextPageToken: json['nextPageToken'] as core.String?,
          unreachable: (json['unreachable'] as core.List?)
              ?.map((item) => item as core.String)
              .toList(),
        );

  /// Serializes this response to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
        if (connectionProfiles != null)
          'connectionProfiles': connectionProfiles,
        if (nextPageToken != null) 'nextPageToken': nextPageToken,
        if (unreachable != null) 'unreachable': unreachable,
      };
}
| |
/// The response message for Locations.ListLocations.
class ListLocationsResponse {
  /// A list of locations that matches the specified filter in the request.
  core.List<Location>? locations;

  /// The standard List next-page token.
  core.String? nextPageToken;

  ListLocationsResponse({this.locations, this.nextPageToken});

  /// Builds a [ListLocationsResponse] from decoded JSON.
  ListLocationsResponse.fromJson(core.Map json)
      : this(
          locations: (json['locations'] as core.List?)
              ?.map(
                (item) => Location.fromJson(
                  item as core.Map<core.String, core.dynamic>,
                ),
              )
              .toList(),
          nextPageToken: json['nextPageToken'] as core.String?,
        );

  /// Serializes this response to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
        if (locations != null) 'locations': locations,
        if (nextPageToken != null) 'nextPageToken': nextPageToken,
      };
}
| |
/// The response message for Operations.ListOperations.
class ListOperationsResponse {
  /// The standard List next-page token.
  core.String? nextPageToken;

  /// A list of operations that matches the specified filter in the request.
  core.List<Operation>? operations;

  /// Unordered list.
  ///
  /// Unreachable resources. Populated when the request sets
  /// `ListOperationsRequest.return_partial_success` and reads across
  /// collections. For example, when attempting to list all resources across all
  /// supported locations.
  core.List<core.String>? unreachable;

  ListOperationsResponse(
      {this.nextPageToken, this.operations, this.unreachable});

  /// Builds a [ListOperationsResponse] from decoded JSON.
  ListOperationsResponse.fromJson(core.Map json)
      : this(
          nextPageToken: json['nextPageToken'] as core.String?,
          operations: (json['operations'] as core.List?)
              ?.map(
                (item) => Operation.fromJson(
                  item as core.Map<core.String, core.dynamic>,
                ),
              )
              .toList(),
          unreachable: (json['unreachable'] as core.List?)
              ?.map((item) => item as core.String)
              .toList(),
        );

  /// Serializes this response to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
        if (nextPageToken != null) 'nextPageToken': nextPageToken,
        if (operations != null) 'operations': operations,
        if (unreachable != null) 'unreachable': unreachable,
      };
}
| |
/// Response containing a list of private connection configurations.
class ListPrivateConnectionsResponse {
  /// A token, which can be sent as `page_token` to retrieve the next page.
  ///
  /// If this field is omitted, there are no subsequent pages.
  core.String? nextPageToken;

  /// List of private connectivity configurations.
  core.List<PrivateConnection>? privateConnections;

  /// Locations that could not be reached.
  core.List<core.String>? unreachable;

  ListPrivateConnectionsResponse(
      {this.nextPageToken, this.privateConnections, this.unreachable});

  /// Builds a [ListPrivateConnectionsResponse] from decoded JSON.
  ListPrivateConnectionsResponse.fromJson(core.Map json)
      : this(
          nextPageToken: json['nextPageToken'] as core.String?,
          privateConnections: (json['privateConnections'] as core.List?)
              ?.map(
                (item) => PrivateConnection.fromJson(
                  item as core.Map<core.String, core.dynamic>,
                ),
              )
              .toList(),
          unreachable: (json['unreachable'] as core.List?)
              ?.map((item) => item as core.String)
              .toList(),
        );

  /// Serializes this response to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
        if (nextPageToken != null) 'nextPageToken': nextPageToken,
        if (privateConnections != null)
          'privateConnections': privateConnections,
        if (unreachable != null) 'unreachable': unreachable,
      };
}
| |
/// Route list response.
class ListRoutesResponse {
  /// A token, which can be sent as `page_token` to retrieve the next page.
  ///
  /// If this field is omitted, there are no subsequent pages.
  core.String? nextPageToken;

  /// List of Routes.
  core.List<Route>? routes;

  /// Locations that could not be reached.
  core.List<core.String>? unreachable;

  ListRoutesResponse({this.nextPageToken, this.routes, this.unreachable});

  /// Builds a [ListRoutesResponse] from decoded JSON.
  ListRoutesResponse.fromJson(core.Map json)
      : this(
          nextPageToken: json['nextPageToken'] as core.String?,
          routes: (json['routes'] as core.List?)
              ?.map(
                (item) => Route.fromJson(
                  item as core.Map<core.String, core.dynamic>,
                ),
              )
              .toList(),
          unreachable: (json['unreachable'] as core.List?)
              ?.map((item) => item as core.String)
              .toList(),
        );

  /// Serializes this response to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
        if (nextPageToken != null) 'nextPageToken': nextPageToken,
        if (routes != null) 'routes': routes,
        if (unreachable != null) 'unreachable': unreachable,
      };
}
| |
/// Response containing the objects for a stream.
class ListStreamObjectsResponse {
  /// A token, which can be sent as `page_token` to retrieve the next page.
  core.String? nextPageToken;

  /// List of stream objects.
  core.List<StreamObject>? streamObjects;

  ListStreamObjectsResponse({this.nextPageToken, this.streamObjects});

  /// Builds a [ListStreamObjectsResponse] from decoded JSON.
  ListStreamObjectsResponse.fromJson(core.Map json)
      : this(
          nextPageToken: json['nextPageToken'] as core.String?,
          streamObjects: (json['streamObjects'] as core.List?)
              ?.map(
                (item) => StreamObject.fromJson(
                  item as core.Map<core.String, core.dynamic>,
                ),
              )
              .toList(),
        );

  /// Serializes this response to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
        if (nextPageToken != null) 'nextPageToken': nextPageToken,
        if (streamObjects != null) 'streamObjects': streamObjects,
      };
}
| |
/// Response message for listing streams.
class ListStreamsResponse {
  /// A token, which can be sent as `page_token` to retrieve the next page.
  ///
  /// If this field is omitted, there are no subsequent pages.
  core.String? nextPageToken;

  /// List of streams
  core.List<Stream>? streams;

  /// Locations that could not be reached.
  core.List<core.String>? unreachable;

  ListStreamsResponse({this.nextPageToken, this.streams, this.unreachable});

  /// Builds a [ListStreamsResponse] from decoded JSON.
  ListStreamsResponse.fromJson(core.Map json)
      : this(
          nextPageToken: json['nextPageToken'] as core.String?,
          streams: (json['streams'] as core.List?)
              ?.map(
                (item) => Stream.fromJson(
                  item as core.Map<core.String, core.dynamic>,
                ),
              )
              .toList(),
          unreachable: (json['unreachable'] as core.List?)
              ?.map((item) => item as core.String)
              .toList(),
        );

  /// Serializes this response to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
        if (nextPageToken != null) 'nextPageToken': nextPageToken,
        if (streams != null) 'streams': streams,
        if (unreachable != null) 'unreachable': unreachable,
      };
}
| |
/// A resource that represents a Google Cloud location.
///
/// Alias for the shared [$Location00] message type.
typedef Location = $Location00;

/// Configuration to specify the Oracle directories to access the log files.
///
/// Alias for the shared [$LogFileDirectories] message type.
typedef LogFileDirectories = $LogFileDirectories;

/// Configuration to use LogMiner CDC method.
///
/// Carries no fields; alias for the shared [$Empty] message type.
typedef LogMiner = $Empty;
| |
/// Request for looking up a specific stream object by its source object
/// identifier.
class LookupStreamObjectRequest {
  /// The source object identifier which maps to the stream object.
  ///
  /// Required.
  SourceObjectIdentifier? sourceObjectIdentifier;

  LookupStreamObjectRequest({this.sourceObjectIdentifier});

  /// Builds a [LookupStreamObjectRequest] from decoded JSON.
  LookupStreamObjectRequest.fromJson(core.Map json)
      : this(
          sourceObjectIdentifier: !json.containsKey('sourceObjectIdentifier')
              ? null
              : SourceObjectIdentifier.fromJson(
                  json['sourceObjectIdentifier']
                      as core.Map<core.String, core.dynamic>,
                ),
        );

  /// Serializes this request to JSON; the identifier is omitted when unset.
  core.Map<core.String, core.dynamic> toJson() => {
        if (sourceObjectIdentifier != null)
          'sourceObjectIdentifier': sourceObjectIdentifier,
      };
}
| |
/// Merge mode defines that all changes to a table will be merged at the
/// destination table.
///
/// Carries no fields; alias for the shared [$Empty] message type.
typedef Merge = $Empty;
| |
/// MongoDB change stream position
class MongodbChangeStreamPosition {
  /// The timestamp to start change stream from.
  ///
  /// Required.
  core.String? startTime;

  MongodbChangeStreamPosition({this.startTime});

  /// Builds a [MongodbChangeStreamPosition] from decoded JSON.
  MongodbChangeStreamPosition.fromJson(core.Map json)
      : this(startTime: json['startTime'] as core.String?);

  /// Serializes to JSON; the timestamp is omitted when unset.
  core.Map<core.String, core.dynamic> toJson() => {
        if (startTime != null) 'startTime': startTime,
      };
}
| |
/// MongoDB Cluster structure.
class MongodbCluster {
  /// MongoDB databases in the cluster.
  core.List<MongodbDatabase>? databases;

  MongodbCluster({this.databases});

  /// Builds a [MongodbCluster] from decoded JSON.
  MongodbCluster.fromJson(core.Map json)
      : this(
          databases: (json['databases'] as core.List?)
              ?.map(
                (item) => MongodbDatabase.fromJson(
                  item as core.Map<core.String, core.dynamic>,
                ),
              )
              .toList(),
        );

  /// Serializes this cluster to JSON; the list is omitted when unset.
  core.Map<core.String, core.dynamic> toJson() => {
        if (databases != null) 'databases': databases,
      };
}
| |
/// MongoDB Collection.
class MongodbCollection {
  /// The collection name.
  core.String? collection;

  /// Fields in the collection.
  core.List<MongodbField>? fields;

  MongodbCollection({this.collection, this.fields});

  /// Builds a [MongodbCollection] from decoded JSON.
  MongodbCollection.fromJson(core.Map json)
      : this(
          collection: json['collection'] as core.String?,
          fields: (json['fields'] as core.List?)
              ?.map(
                (item) => MongodbField.fromJson(
                  item as core.Map<core.String, core.dynamic>,
                ),
              )
              .toList(),
        );

  /// Serializes this collection to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
        if (collection != null) 'collection': collection,
        if (fields != null) 'fields': fields,
      };
}
| |
/// MongoDB Database.
class MongodbDatabase {
  /// Collections in the database.
  core.List<MongodbCollection>? collections;

  /// The database name.
  core.String? database;

  MongodbDatabase({this.collections, this.database});

  /// Builds a [MongodbDatabase] from decoded JSON.
  MongodbDatabase.fromJson(core.Map json)
      : this(
          collections: (json['collections'] as core.List?)
              ?.map(
                (item) => MongodbCollection.fromJson(
                  item as core.Map<core.String, core.dynamic>,
                ),
              )
              .toList(),
          database: json['database'] as core.String?,
        );

  /// Serializes this database to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
        if (collections != null) 'collections': collections,
        if (database != null) 'database': database,
      };
}
| |
/// MongoDB Field.
class MongodbField {
  /// The field name.
  core.String? field;

  MongodbField({this.field});

  /// Builds a [MongodbField] from decoded JSON.
  MongodbField.fromJson(core.Map json)
      : this(field: json['field'] as core.String?);

  /// Serializes to JSON; the name is omitted when unset.
  core.Map<core.String, core.dynamic> toJson() => {
        if (field != null) 'field': field,
      };
}
| |
/// MongoDB data source object identifier.
///
/// Pinpoints a single collection by its database and collection names.
class MongodbObjectIdentifier {
  /// The collection name.
  ///
  /// Required.
  core.String? collection;

  /// The database name.
  ///
  /// Required.
  core.String? database;

  MongodbObjectIdentifier({this.collection, this.database});

  MongodbObjectIdentifier.fromJson(core.Map json)
    : this(
        collection: json['collection'] as core.String?,
        database: json['database'] as core.String?,
      );

  /// Serializes this object, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'collection': ?collection,
    'database': ?database,
  };
}
| |
/// Profile for connecting to a MongoDB source.
class MongodbProfile {
  /// Specifies additional options for the MongoDB connection.
  ///
  /// The options should be sent as key-value pairs, for example:
  /// `additional_options = {"serverSelectionTimeoutMS": "10000",
  /// "directConnection": "true"}`. Keys are case-sensitive and should match the
  /// official MongoDB connection string options:
  /// https://www.mongodb.com/docs/manual/reference/connection-string-options/
  /// The server will not modify the values provided by the user.
  ///
  /// Optional.
  core.Map<core.String, core.String>? additionalOptions;

  /// List of host addresses for a MongoDB cluster.
  ///
  /// For SRV connection format, this list must contain exactly one DNS host
  /// without a port. For Standard connection format, this list must contain all
  /// the required hosts in the cluster with their respective ports.
  ///
  /// Required.
  core.List<HostAddress>? hostAddresses;

  /// Password for the MongoDB connection.
  ///
  /// Mutually exclusive with the `secret_manager_stored_password` field.
  ///
  /// Optional.
  core.String? password;

  /// Name of the replica set.
  ///
  /// Only needed for self hosted replica set type MongoDB cluster. For SRV
  /// connection format, this field must be empty. For Standard connection
  /// format, this field must be specified.
  ///
  /// Optional.
  core.String? replicaSet;

  /// A reference to a Secret Manager resource name storing the MongoDB
  /// connection password.
  ///
  /// Mutually exclusive with the `password` field.
  ///
  /// Optional.
  core.String? secretManagerStoredPassword;

  /// Srv connection format.
  SrvConnectionFormat? srvConnectionFormat;

  /// SSL configuration for the MongoDB connection.
  ///
  /// Optional.
  MongodbSslConfig? sslConfig;

  /// Standard connection format.
  StandardConnectionFormat? standardConnectionFormat;

  /// Username for the MongoDB connection.
  ///
  /// Required.
  core.String? username;

  MongodbProfile({
    this.additionalOptions,
    this.hostAddresses,
    this.password,
    this.replicaSet,
    this.secretManagerStoredPassword,
    this.srvConnectionFormat,
    this.sslConfig,
    this.standardConnectionFormat,
    this.username,
  });

  // Deserializes from the wire format; absent keys leave fields null, and
  // message-typed fields are only parsed when their key is present.
  MongodbProfile.fromJson(core.Map json_)
    : this(
        additionalOptions:
            (json_['additionalOptions'] as core.Map<core.String, core.dynamic>?)
                ?.map((key, value) => core.MapEntry(key, value as core.String)),
        hostAddresses: (json_['hostAddresses'] as core.List?)
            ?.map(
              (value) => HostAddress.fromJson(
                value as core.Map<core.String, core.dynamic>,
              ),
            )
            .toList(),
        password: json_['password'] as core.String?,
        replicaSet: json_['replicaSet'] as core.String?,
        secretManagerStoredPassword:
            json_['secretManagerStoredPassword'] as core.String?,
        srvConnectionFormat: json_.containsKey('srvConnectionFormat')
            ? SrvConnectionFormat.fromJson(
                json_['srvConnectionFormat']
                    as core.Map<core.String, core.dynamic>,
              )
            : null,
        sslConfig: json_.containsKey('sslConfig')
            ? MongodbSslConfig.fromJson(
                json_['sslConfig'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        standardConnectionFormat: json_.containsKey('standardConnectionFormat')
            ? StandardConnectionFormat.fromJson(
                json_['standardConnectionFormat']
                    as core.Map<core.String, core.dynamic>,
              )
            : null,
        username: json_['username'] as core.String?,
      );

  // Serializes to the wire format; `?x` null-aware entries drop unset fields.
  core.Map<core.String, core.dynamic> toJson() {
    final additionalOptions = this.additionalOptions;
    final hostAddresses = this.hostAddresses;
    final password = this.password;
    final replicaSet = this.replicaSet;
    final secretManagerStoredPassword = this.secretManagerStoredPassword;
    final srvConnectionFormat = this.srvConnectionFormat;
    final sslConfig = this.sslConfig;
    final standardConnectionFormat = this.standardConnectionFormat;
    final username = this.username;
    return {
      'additionalOptions': ?additionalOptions,
      'hostAddresses': ?hostAddresses,
      'password': ?password,
      'replicaSet': ?replicaSet,
      'secretManagerStoredPassword': ?secretManagerStoredPassword,
      'srvConnectionFormat': ?srvConnectionFormat,
      'sslConfig': ?sslConfig,
      'standardConnectionFormat': ?standardConnectionFormat,
      'username': ?username,
    };
  }
}
| |
/// Configuration for syncing data from a MongoDB source.
class MongodbSourceConfig {
  /// The MongoDB collections to exclude from the stream.
  MongodbCluster? excludeObjects;

  /// The MongoDB collections to include in the stream.
  MongodbCluster? includeObjects;

  /// MongoDB JSON mode to use for the stream.
  ///
  /// Optional.
  /// Possible string values are:
  /// - "MONGODB_JSON_MODE_UNSPECIFIED" : Unspecified JSON mode.
  /// - "STRICT" : Strict JSON mode.
  /// - "CANONICAL" : Canonical JSON mode.
  core.String? jsonMode;

  /// Maximum number of concurrent backfill tasks.
  ///
  /// Must be non-negative and at most 50. Left unset (or set to 0), the
  /// system's default value is used.
  ///
  /// Optional.
  core.int? maxConcurrentBackfillTasks;

  MongodbSourceConfig({
    this.excludeObjects,
    this.includeObjects,
    this.jsonMode,
    this.maxConcurrentBackfillTasks,
  });

  MongodbSourceConfig.fromJson(core.Map json)
    : this(
        excludeObjects: json.containsKey('excludeObjects')
            ? MongodbCluster.fromJson(
                json['excludeObjects'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        includeObjects: json.containsKey('includeObjects')
            ? MongodbCluster.fromJson(
                json['includeObjects'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        jsonMode: json['jsonMode'] as core.String?,
        maxConcurrentBackfillTasks:
            json['maxConcurrentBackfillTasks'] as core.int?,
      );

  /// Serializes this object, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'excludeObjects': ?excludeObjects,
    'includeObjects': ?includeObjects,
    'jsonMode': ?jsonMode,
    'maxConcurrentBackfillTasks': ?maxConcurrentBackfillTasks,
  };
}
| |
/// MongoDB SSL configuration information.
class MongodbSslConfig {
  /// Input only.
  ///
  /// PEM-encoded certificate of the CA that signed the source database server's
  /// certificate.
  ///
  /// Optional.
  core.String? caCertificate;

  /// Indicates whether the ca_certificate field is set.
  ///
  /// Output only.
  core.bool? caCertificateSet;

  /// Input only.
  ///
  /// PEM-encoded certificate used by the replica to authenticate against the
  /// source database server. When supplied, the 'client_key' and
  /// 'ca_certificate' fields are mandatory.
  ///
  /// Optional.
  core.String? clientCertificate;

  /// Indicates whether the client_certificate field is set.
  ///
  /// Output only.
  core.bool? clientCertificateSet;

  /// Input only.
  ///
  /// PEM-encoded private key associated with the Client Certificate. When
  /// supplied, the 'client_certificate' and 'ca_certificate' fields are
  /// mandatory.
  ///
  /// Optional.
  core.String? clientKey;

  /// Indicates whether the client_key field is set.
  ///
  /// Output only.
  core.bool? clientKeySet;

  /// Input only.
  ///
  /// A reference to a Secret Manager resource name storing the PEM-encoded
  /// private key associated with the Client Certificate. When supplied, the
  /// 'client_certificate' and 'ca_certificate' fields are mandatory. Mutually
  /// exclusive with the `client_key` field.
  ///
  /// Optional.
  core.String? secretManagerStoredClientKey;

  MongodbSslConfig({
    this.caCertificate,
    this.caCertificateSet,
    this.clientCertificate,
    this.clientCertificateSet,
    this.clientKey,
    this.clientKeySet,
    this.secretManagerStoredClientKey,
  });

  MongodbSslConfig.fromJson(core.Map json)
    : this(
        caCertificate: json['caCertificate'] as core.String?,
        caCertificateSet: json['caCertificateSet'] as core.bool?,
        clientCertificate: json['clientCertificate'] as core.String?,
        clientCertificateSet: json['clientCertificateSet'] as core.bool?,
        clientKey: json['clientKey'] as core.String?,
        clientKeySet: json['clientKeySet'] as core.bool?,
        secretManagerStoredClientKey:
            json['secretManagerStoredClientKey'] as core.String?,
      );

  /// Serializes this object, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'caCertificate': ?caCertificate,
    'caCertificateSet': ?caCertificateSet,
    'clientCertificate': ?clientCertificate,
    'clientCertificateSet': ?clientCertificateSet,
    'clientKey': ?clientKey,
    'clientKeySet': ?clientKeySet,
    'secretManagerStoredClientKey': ?secretManagerStoredClientKey,
  };
}
| |
/// CDC strategy to start replicating from the most recent position in the
/// source.
///
/// This message carries no fields; its presence alone selects the strategy.
typedef MostRecentStartPosition = $Empty;
| |
/// MySQL Column.
///
/// Describes a single column of a MySQL table, including its type and
/// key/nullability metadata.
class MysqlColumn {
  /// Column collation.
  core.String? collation;

  /// The column name.
  core.String? column;

  /// The MySQL data type.
  ///
  /// Full data types list can be found here:
  /// https://dev.mysql.com/doc/refman/8.0/en/data-types.html
  core.String? dataType;

  /// Column length.
  core.int? length;

  /// Whether or not the column can accept a null value.
  core.bool? nullable;

  /// The ordinal position of the column in the table.
  core.int? ordinalPosition;

  /// Column precision.
  core.int? precision;

  /// Whether or not the column represents a primary key.
  core.bool? primaryKey;

  /// Column scale.
  core.int? scale;

  MysqlColumn({
    this.collation,
    this.column,
    this.dataType,
    this.length,
    this.nullable,
    this.ordinalPosition,
    this.precision,
    this.primaryKey,
    this.scale,
  });

  MysqlColumn.fromJson(core.Map json)
    : this(
        collation: json['collation'] as core.String?,
        column: json['column'] as core.String?,
        dataType: json['dataType'] as core.String?,
        length: json['length'] as core.int?,
        nullable: json['nullable'] as core.bool?,
        ordinalPosition: json['ordinalPosition'] as core.int?,
        precision: json['precision'] as core.int?,
        primaryKey: json['primaryKey'] as core.bool?,
        scale: json['scale'] as core.int?,
      );

  /// Serializes this object, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'collation': ?collation,
    'column': ?column,
    'dataType': ?dataType,
    'length': ?length,
    'nullable': ?nullable,
    'ordinalPosition': ?ordinalPosition,
    'precision': ?precision,
    'primaryKey': ?primaryKey,
    'scale': ?scale,
  };
}
| |
/// MySQL database.
///
/// Names one database and, optionally, the tables within it.
class MysqlDatabase {
  /// The database name.
  core.String? database;

  /// Tables in the database.
  core.List<MysqlTable>? mysqlTables;

  MysqlDatabase({this.database, this.mysqlTables});

  MysqlDatabase.fromJson(core.Map json)
    : this(
        database: json['database'] as core.String?,
        mysqlTables: json['mysqlTables'] == null
            ? null
            : [
                for (final item in json['mysqlTables'] as core.List)
                  MysqlTable.fromJson(
                    item as core.Map<core.String, core.dynamic>,
                  ),
              ],
      );

  /// Serializes this object, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'database': ?database,
    'mysqlTables': ?mysqlTables,
  };
}
| |
/// MySQL GTID position.
///
/// Identifies the replication position via a GTID set.
class MysqlGtidPosition {
  /// The gtid set to start replication from.
  ///
  /// Required.
  core.String? gtidSet;

  MysqlGtidPosition({this.gtidSet});

  MysqlGtidPosition.fromJson(core.Map json)
    : this(gtidSet: json['gtidSet'] as core.String?);

  /// Serializes this object, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {'gtidSet': ?gtidSet};
}
| |
/// MySQL log position.
///
/// Identifies a point in a binary log file by file name and offset.
class MysqlLogPosition {
  /// The binary log file name.
  ///
  /// Required.
  core.String? logFile;

  /// The position within the binary log file.
  ///
  /// Default is head of file.
  ///
  /// Optional.
  core.int? logPosition;

  MysqlLogPosition({this.logFile, this.logPosition});

  MysqlLogPosition.fromJson(core.Map json)
    : this(
        logFile: json['logFile'] as core.String?,
        logPosition: json['logPosition'] as core.int?,
      );

  /// Serializes this object, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'logFile': ?logFile,
    'logPosition': ?logPosition,
  };
}
| |
/// Mysql data source object identifier.
///
/// Pinpoints a single table by its database and table names.
class MysqlObjectIdentifier {
  /// The database name.
  ///
  /// Required.
  core.String? database;

  /// The table name.
  ///
  /// Required.
  core.String? table;

  MysqlObjectIdentifier({this.database, this.table});

  MysqlObjectIdentifier.fromJson(core.Map json)
    : this(
        database: json['database'] as core.String?,
        table: json['table'] as core.String?,
      );

  /// Serializes this object, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'database': ?database,
    'table': ?table,
  };
}
| |
/// Profile for connecting to a MySQL source.
class MysqlProfile {
  /// Hostname for the MySQL connection.
  ///
  /// Required.
  core.String? hostname;

  /// Input only.
  ///
  /// Password for the MySQL connection. Mutually exclusive with the
  /// `secret_manager_stored_password` field.
  ///
  /// Optional.
  core.String? password;

  /// Port for the MySQL connection, default value is 3306.
  core.int? port;

  /// A reference to a Secret Manager resource name storing the MySQL connection
  /// password.
  ///
  /// Mutually exclusive with the `password` field.
  ///
  /// Optional.
  core.String? secretManagerStoredPassword;

  /// SSL configuration for the MySQL connection.
  MysqlSslConfig? sslConfig;

  /// Username for the MySQL connection.
  ///
  /// Required.
  core.String? username;

  MysqlProfile({
    this.hostname,
    this.password,
    this.port,
    this.secretManagerStoredPassword,
    this.sslConfig,
    this.username,
  });

  MysqlProfile.fromJson(core.Map json)
    : this(
        hostname: json['hostname'] as core.String?,
        password: json['password'] as core.String?,
        port: json['port'] as core.int?,
        secretManagerStoredPassword:
            json['secretManagerStoredPassword'] as core.String?,
        sslConfig: json.containsKey('sslConfig')
            ? MysqlSslConfig.fromJson(
                json['sslConfig'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        username: json['username'] as core.String?,
      );

  /// Serializes this object, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'hostname': ?hostname,
    'password': ?password,
    'port': ?port,
    'secretManagerStoredPassword': ?secretManagerStoredPassword,
    'sslConfig': ?sslConfig,
    'username': ?username,
  };
}
| |
/// MySQL database structure.
///
/// The set of databases present on a MySQL server.
class MysqlRdbms {
  /// Mysql databases on the server
  core.List<MysqlDatabase>? mysqlDatabases;

  MysqlRdbms({this.mysqlDatabases});

  MysqlRdbms.fromJson(core.Map json)
    : this(
        mysqlDatabases: json['mysqlDatabases'] == null
            ? null
            : [
                for (final item in json['mysqlDatabases'] as core.List)
                  MysqlDatabase.fromJson(
                    item as core.Map<core.String, core.dynamic>,
                  ),
              ],
      );

  /// Serializes this object, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'mysqlDatabases': ?mysqlDatabases,
  };
}
| |
/// Configuration for syncing data from a MySQL source.
class MysqlSourceConfig {
  /// Use Binary log position based replication.
  BinaryLogPosition? binaryLogPosition;

  /// The MySQL objects to exclude from the stream.
  MysqlRdbms? excludeObjects;

  /// Use GTID based replication.
  Gtid? gtid;

  /// The MySQL objects to retrieve from the source.
  MysqlRdbms? includeObjects;

  /// Maximum number of concurrent backfill tasks.
  ///
  /// Must be non-negative. Left unset (or set to 0), the system's default
  /// value will be used.
  core.int? maxConcurrentBackfillTasks;

  /// Maximum number of concurrent CDC tasks.
  ///
  /// Must be non-negative. Left unset (or set to 0), the system's default
  /// value will be used.
  core.int? maxConcurrentCdcTasks;

  MysqlSourceConfig({
    this.binaryLogPosition,
    this.excludeObjects,
    this.gtid,
    this.includeObjects,
    this.maxConcurrentBackfillTasks,
    this.maxConcurrentCdcTasks,
  });

  MysqlSourceConfig.fromJson(core.Map json)
    : this(
        binaryLogPosition: json.containsKey('binaryLogPosition')
            ? BinaryLogPosition.fromJson(
                json['binaryLogPosition']
                    as core.Map<core.String, core.dynamic>,
              )
            : null,
        excludeObjects: json.containsKey('excludeObjects')
            ? MysqlRdbms.fromJson(
                json['excludeObjects'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        gtid: json.containsKey('gtid')
            ? Gtid.fromJson(json['gtid'] as core.Map<core.String, core.dynamic>)
            : null,
        includeObjects: json.containsKey('includeObjects')
            ? MysqlRdbms.fromJson(
                json['includeObjects'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        maxConcurrentBackfillTasks:
            json['maxConcurrentBackfillTasks'] as core.int?,
        maxConcurrentCdcTasks: json['maxConcurrentCdcTasks'] as core.int?,
      );

  /// Serializes this object, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'binaryLogPosition': ?binaryLogPosition,
    'excludeObjects': ?excludeObjects,
    'gtid': ?gtid,
    'includeObjects': ?includeObjects,
    'maxConcurrentBackfillTasks': ?maxConcurrentBackfillTasks,
    'maxConcurrentCdcTasks': ?maxConcurrentCdcTasks,
  };
}
| |
/// MySQL SSL configuration information.
class MysqlSslConfig {
  /// Input only.
  ///
  /// PEM-encoded certificate of the CA that signed the source database server's
  /// certificate.
  core.String? caCertificate;

  /// Indicates whether the ca_certificate field is set.
  ///
  /// Output only.
  core.bool? caCertificateSet;

  /// Input only.
  ///
  /// PEM-encoded certificate used by the replica to authenticate against the
  /// source database server. When supplied, the 'client_key' and
  /// 'ca_certificate' fields are mandatory.
  ///
  /// Optional.
  core.String? clientCertificate;

  /// Indicates whether the client_certificate field is set.
  ///
  /// Output only.
  core.bool? clientCertificateSet;

  /// Input only.
  ///
  /// PEM-encoded private key associated with the Client Certificate. When
  /// supplied, the 'client_certificate' and 'ca_certificate' fields are
  /// mandatory.
  ///
  /// Optional.
  core.String? clientKey;

  /// Indicates whether the client_key field is set.
  ///
  /// Output only.
  core.bool? clientKeySet;

  MysqlSslConfig({
    this.caCertificate,
    this.caCertificateSet,
    this.clientCertificate,
    this.clientCertificateSet,
    this.clientKey,
    this.clientKeySet,
  });

  MysqlSslConfig.fromJson(core.Map json)
    : this(
        caCertificate: json['caCertificate'] as core.String?,
        caCertificateSet: json['caCertificateSet'] as core.bool?,
        clientCertificate: json['clientCertificate'] as core.String?,
        clientCertificateSet: json['clientCertificateSet'] as core.bool?,
        clientKey: json['clientKey'] as core.String?,
        clientKeySet: json['clientKeySet'] as core.bool?,
      );

  /// Serializes this object, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'caCertificate': ?caCertificate,
    'caCertificateSet': ?caCertificateSet,
    'clientCertificate': ?clientCertificate,
    'clientCertificateSet': ?clientCertificateSet,
    'clientKey': ?clientKey,
    'clientKeySet': ?clientKeySet,
  };
}
| |
/// MySQL table.
///
/// Names one table and, optionally, the columns within it.
class MysqlTable {
  /// MySQL columns in the database.
  ///
  /// When unspecified as part of include/exclude objects, includes/excludes
  /// everything.
  core.List<MysqlColumn>? mysqlColumns;

  /// The table name.
  core.String? table;

  MysqlTable({this.mysqlColumns, this.table});

  MysqlTable.fromJson(core.Map json)
    : this(
        mysqlColumns: json['mysqlColumns'] == null
            ? null
            : [
                for (final item in json['mysqlColumns'] as core.List)
                  MysqlColumn.fromJson(
                    item as core.Map<core.String, core.dynamic>,
                  ),
              ],
        table: json['table'] as core.String?,
      );

  /// Serializes this object, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'mysqlColumns': ?mysqlColumns,
    'table': ?table,
  };
}
| |
/// CDC strategy to resume replication from the next available position in the
/// source.
///
/// This message carries no fields; its presence alone selects the strategy.
typedef NextAvailableStartPosition = $Empty;
| |
/// OAuth2 Client Credentials.
///
/// Holds the client ID plus the client secret, supplied either inline or via
/// a Secret Manager reference (the two are mutually exclusive).
class Oauth2ClientCredentials {
  /// Client ID for Salesforce OAuth2 Client Credentials.
  ///
  /// Required.
  core.String? clientId;

  /// Client secret for Salesforce OAuth2 Client Credentials.
  ///
  /// Mutually exclusive with the `secret_manager_stored_client_secret` field.
  ///
  /// Optional.
  core.String? clientSecret;

  /// A reference to a Secret Manager resource name storing the Salesforce
  /// OAuth2 client_secret.
  ///
  /// Mutually exclusive with the `client_secret` field.
  ///
  /// Optional.
  core.String? secretManagerStoredClientSecret;

  Oauth2ClientCredentials({
    this.clientId,
    this.clientSecret,
    this.secretManagerStoredClientSecret,
  });

  Oauth2ClientCredentials.fromJson(core.Map json)
    : this(
        clientId: json['clientId'] as core.String?,
        clientSecret: json['clientSecret'] as core.String?,
        secretManagerStoredClientSecret:
            json['secretManagerStoredClientSecret'] as core.String?,
      );

  /// Serializes this object, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'clientId': ?clientId,
    'clientSecret': ?clientSecret,
    'secretManagerStoredClientSecret': ?secretManagerStoredClientSecret,
  };
}
| |
/// Object filter to apply the rules to.
class ObjectFilter {
  /// Specific source object identifier.
  SourceObjectIdentifier? sourceObjectIdentifier;

  ObjectFilter({this.sourceObjectIdentifier});

  ObjectFilter.fromJson(core.Map json)
    : this(
        sourceObjectIdentifier: json.containsKey('sourceObjectIdentifier')
            ? SourceObjectIdentifier.fromJson(
                json['sourceObjectIdentifier']
                    as core.Map<core.String, core.dynamic>,
              )
            : null,
      );

  /// Serializes this object, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'sourceObjectIdentifier': ?sourceObjectIdentifier,
  };
}
| |
/// This resource represents a long-running operation that is the result of a
/// network API call.
class Operation {
  /// If the value is `false`, it means the operation is still in progress.
  ///
  /// If `true`, the operation is completed, and either `error` or `response` is
  /// available.
  core.bool? done;

  /// The error result of the operation in case of failure or cancellation.
  Status? error;

  /// Service-specific metadata associated with the operation.
  ///
  /// It typically contains progress information and common metadata such as
  /// create time. Some services might not provide such metadata. Any method
  /// that returns a long-running operation should document the metadata type,
  /// if any.
  ///
  /// The values for Object must be JSON objects. It can consist of `num`,
  /// `String`, `bool` and `null` as well as `Map` and `List` values.
  core.Map<core.String, core.Object?>? metadata;

  /// The server-assigned name, which is only unique within the same service
  /// that originally returns it.
  ///
  /// If you use the default HTTP mapping, the `name` should be a resource name
  /// ending with `operations/{unique_id}`.
  core.String? name;

  /// The normal, successful response of the operation.
  ///
  /// If the original method returns no data on success, such as `Delete`, the
  /// response is `google.protobuf.Empty`. If the original method is standard
  /// `Get`/`Create`/`Update`, the response should be the resource. For other
  /// methods, the response should have the type `XxxResponse`, where `Xxx` is
  /// the original method name. For example, if the original method name is
  /// `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.
  ///
  /// The values for Object must be JSON objects. It can consist of `num`,
  /// `String`, `bool` and `null` as well as `Map` and `List` values.
  core.Map<core.String, core.Object?>? response;

  Operation({this.done, this.error, this.metadata, this.name, this.response});

  Operation.fromJson(core.Map json)
    : this(
        done: json['done'] as core.bool?,
        error: json.containsKey('error')
            ? Status.fromJson(
                json['error'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        metadata: json.containsKey('metadata')
            ? json['metadata'] as core.Map<core.String, core.dynamic>
            : null,
        name: json['name'] as core.String?,
        response: json.containsKey('response')
            ? json['response'] as core.Map<core.String, core.dynamic>
            : null,
      );

  /// Serializes this object, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'done': ?done,
    'error': ?error,
    'metadata': ?metadata,
    'name': ?name,
    'response': ?response,
  };
}
| |
/// Configuration for Oracle Automatic Storage Management (ASM) connection.
class OracleAsmConfig {
  /// ASM service name for the Oracle ASM connection.
  ///
  /// Required.
  core.String? asmService;

  /// Connection string attributes
  ///
  /// Optional.
  core.Map<core.String, core.String>? connectionAttributes;

  /// Hostname for the Oracle ASM connection.
  ///
  /// Required.
  core.String? hostname;

  /// SSL configuration for the Oracle connection.
  ///
  /// Optional.
  OracleSslConfig? oracleSslConfig;

  /// Password for the Oracle ASM connection.
  ///
  /// Mutually exclusive with the `secret_manager_stored_password` field.
  ///
  /// Optional.
  core.String? password;

  /// Port for the Oracle ASM connection.
  ///
  /// Required.
  core.int? port;

  /// A reference to a Secret Manager resource name storing the Oracle ASM
  /// connection password.
  ///
  /// Mutually exclusive with the `password` field.
  ///
  /// Optional.
  core.String? secretManagerStoredPassword;

  /// Username for the Oracle ASM connection.
  ///
  /// Required.
  core.String? username;

  OracleAsmConfig({
    this.asmService,
    this.connectionAttributes,
    this.hostname,
    this.oracleSslConfig,
    this.password,
    this.port,
    this.secretManagerStoredPassword,
    this.username,
  });

  OracleAsmConfig.fromJson(core.Map json)
    : this(
        asmService: json['asmService'] as core.String?,
        connectionAttributes:
            (json['connectionAttributes']
                    as core.Map<core.String, core.dynamic>?)
                ?.map((k, v) => core.MapEntry(k, v as core.String)),
        hostname: json['hostname'] as core.String?,
        oracleSslConfig: json.containsKey('oracleSslConfig')
            ? OracleSslConfig.fromJson(
                json['oracleSslConfig'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        password: json['password'] as core.String?,
        port: json['port'] as core.int?,
        secretManagerStoredPassword:
            json['secretManagerStoredPassword'] as core.String?,
        username: json['username'] as core.String?,
      );

  /// Serializes this object, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'asmService': ?asmService,
    'connectionAttributes': ?connectionAttributes,
    'hostname': ?hostname,
    'oracleSslConfig': ?oracleSslConfig,
    'password': ?password,
    'port': ?port,
    'secretManagerStoredPassword': ?secretManagerStoredPassword,
    'username': ?username,
  };
}
| |
/// Configuration to use Oracle ASM to access the log files.
///
/// Alias of the shared [$Empty] message: it carries no fields of its own.
typedef OracleAsmLogFileAccess = $Empty;
| |
| /// Oracle Column. |
/// Oracle Column.
class OracleColumn {
  /// The column name.
  core.String? column;

  /// The Oracle data type.
  core.String? dataType;

  /// Column encoding.
  core.String? encoding;

  /// Column length.
  core.int? length;

  /// Whether or not the column can accept a null value.
  core.bool? nullable;

  /// The ordinal position of the column in the table.
  core.int? ordinalPosition;

  /// Column precision.
  core.int? precision;

  /// Whether or not the column represents a primary key.
  core.bool? primaryKey;

  /// Column scale.
  core.int? scale;

  OracleColumn({
    this.column,
    this.dataType,
    this.encoding,
    this.length,
    this.nullable,
    this.ordinalPosition,
    this.precision,
    this.primaryKey,
    this.scale,
  });

  /// Deserializes an [OracleColumn] from its JSON map representation.
  OracleColumn.fromJson(core.Map json)
    : this(
        column: json['column'] as core.String?,
        dataType: json['dataType'] as core.String?,
        encoding: json['encoding'] as core.String?,
        length: json['length'] as core.int?,
        nullable: json['nullable'] as core.bool?,
        ordinalPosition: json['ordinalPosition'] as core.int?,
        precision: json['precision'] as core.int?,
        primaryKey: json['primaryKey'] as core.bool?,
        scale: json['scale'] as core.int?,
      );

  /// Serializes this column to a JSON map, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => <core.String, core.dynamic>{
    'column': ?column,
    'dataType': ?dataType,
    'encoding': ?encoding,
    'length': ?length,
    'nullable': ?nullable,
    'ordinalPosition': ?ordinalPosition,
    'precision': ?precision,
    'primaryKey': ?primaryKey,
    'scale': ?scale,
  };
}
| |
/// Oracle data source object identifier.
///
/// Alias of the shared [$ObjectIdentifier] message defined in `shared.dart`.
typedef OracleObjectIdentifier = $ObjectIdentifier;
| |
| /// Profile for connecting to an Oracle source. |
/// Profile for connecting to an Oracle source.
class OracleProfile {
  /// Connection string attributes
  core.Map<core.String, core.String>? connectionAttributes;

  /// Database for the Oracle connection.
  ///
  /// Required.
  core.String? databaseService;

  /// Hostname for the Oracle connection.
  ///
  /// Required.
  core.String? hostname;

  /// Configuration for Oracle ASM connection.
  ///
  /// Optional.
  OracleAsmConfig? oracleAsmConfig;

  /// SSL configuration for the Oracle connection.
  ///
  /// Optional.
  OracleSslConfig? oracleSslConfig;

  /// Password for the Oracle connection.
  ///
  /// Mutually exclusive with the `secret_manager_stored_password` field.
  ///
  /// Optional.
  core.String? password;

  /// Port for the Oracle connection, default value is 1521.
  core.int? port;

  /// A reference to a Secret Manager resource name storing the Oracle
  /// connection password.
  ///
  /// Mutually exclusive with the `password` field.
  ///
  /// Optional.
  core.String? secretManagerStoredPassword;

  /// Username for the Oracle connection.
  ///
  /// Required.
  core.String? username;

  OracleProfile({
    this.connectionAttributes,
    this.databaseService,
    this.hostname,
    this.oracleAsmConfig,
    this.oracleSslConfig,
    this.password,
    this.port,
    this.secretManagerStoredPassword,
    this.username,
  });

  /// Deserializes an [OracleProfile] from its JSON map representation.
  ///
  /// Nested messages are only constructed when their key is present.
  OracleProfile.fromJson(core.Map json)
    : this(
        connectionAttributes:
            (json['connectionAttributes']
                    as core.Map<core.String, core.dynamic>?)
                ?.map((k, v) => core.MapEntry(k, v as core.String)),
        databaseService: json['databaseService'] as core.String?,
        hostname: json['hostname'] as core.String?,
        oracleAsmConfig: json.containsKey('oracleAsmConfig')
            ? OracleAsmConfig.fromJson(
                json['oracleAsmConfig'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        oracleSslConfig: json.containsKey('oracleSslConfig')
            ? OracleSslConfig.fromJson(
                json['oracleSslConfig'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        password: json['password'] as core.String?,
        port: json['port'] as core.int?,
        secretManagerStoredPassword:
            json['secretManagerStoredPassword'] as core.String?,
        username: json['username'] as core.String?,
      );

  /// Serializes this profile to a JSON map, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => <core.String, core.dynamic>{
    'connectionAttributes': ?connectionAttributes,
    'databaseService': ?databaseService,
    'hostname': ?hostname,
    'oracleAsmConfig': ?oracleAsmConfig,
    'oracleSslConfig': ?oracleSslConfig,
    'password': ?password,
    'port': ?port,
    'secretManagerStoredPassword': ?secretManagerStoredPassword,
    'username': ?username,
  };
}
| |
| /// Oracle database structure. |
/// Oracle database structure.
class OracleRdbms {
  /// Oracle schemas/databases in the database server.
  core.List<OracleSchema>? oracleSchemas;

  OracleRdbms({this.oracleSchemas});

  /// Deserializes an [OracleRdbms] from its JSON map representation.
  OracleRdbms.fromJson(core.Map json)
    : this(
        oracleSchemas: (json['oracleSchemas'] as core.List?)
            ?.map(
              (item) => OracleSchema.fromJson(
                item as core.Map<core.String, core.dynamic>,
              ),
            )
            .toList(),
      );

  /// Serializes this structure to a JSON map, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => <core.String, core.dynamic>{
    'oracleSchemas': ?oracleSchemas,
  };
}
| |
| /// Oracle schema. |
/// Oracle schema.
class OracleSchema {
  /// Tables in the schema.
  core.List<OracleTable>? oracleTables;

  /// The schema name.
  core.String? schema;

  OracleSchema({this.oracleTables, this.schema});

  /// Deserializes an [OracleSchema] from its JSON map representation.
  OracleSchema.fromJson(core.Map json)
    : this(
        oracleTables: (json['oracleTables'] as core.List?)
            ?.map(
              (item) => OracleTable.fromJson(
                item as core.Map<core.String, core.dynamic>,
              ),
            )
            .toList(),
        schema: json['schema'] as core.String?,
      );

  /// Serializes this schema to a JSON map, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => <core.String, core.dynamic>{
    'oracleTables': ?oracleTables,
    'schema': ?schema,
  };
}
| |
| /// Oracle SCN position |
/// Oracle SCN position
class OracleScnPosition {
  /// SCN number from where Logs will be read
  ///
  /// Required.
  core.String? scn;

  OracleScnPosition({this.scn});

  /// Deserializes an [OracleScnPosition] from its JSON map representation.
  OracleScnPosition.fromJson(core.Map json)
    : this(scn: json['scn'] as core.String?);

  /// Serializes this position to a JSON map, omitting an unset (null) SCN.
  core.Map<core.String, core.dynamic> toJson() => <core.String, core.dynamic>{
    'scn': ?scn,
  };
}
| |
| /// Configuration for syncing data from an Oracle source. |
/// Configuration for syncing data from an Oracle source.
class OracleSourceConfig {
  /// Use Binary Log Parser.
  BinaryLogParser? binaryLogParser;

  /// Drop large object values.
  DropLargeObjects? dropLargeObjects;

  /// The Oracle objects to exclude from the stream.
  OracleRdbms? excludeObjects;

  /// The Oracle objects to include in the stream.
  OracleRdbms? includeObjects;

  /// Use LogMiner.
  LogMiner? logMiner;

  /// Maximum number of concurrent backfill tasks.
  ///
  /// The number should be non-negative. If not set (or set to 0), the system's
  /// default value is used.
  core.int? maxConcurrentBackfillTasks;

  /// Maximum number of concurrent CDC tasks.
  ///
  /// The number should be non-negative. If not set (or set to 0), the system's
  /// default value is used.
  core.int? maxConcurrentCdcTasks;

  /// Stream large object values.
  StreamLargeObjects? streamLargeObjects;

  OracleSourceConfig({
    this.binaryLogParser,
    this.dropLargeObjects,
    this.excludeObjects,
    this.includeObjects,
    this.logMiner,
    this.maxConcurrentBackfillTasks,
    this.maxConcurrentCdcTasks,
    this.streamLargeObjects,
  });

  /// Deserializes an [OracleSourceConfig] from its JSON map representation.
  ///
  /// Nested messages are only constructed when their key is present.
  OracleSourceConfig.fromJson(core.Map json)
    : this(
        binaryLogParser: json.containsKey('binaryLogParser')
            ? BinaryLogParser.fromJson(
                json['binaryLogParser'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        dropLargeObjects: json.containsKey('dropLargeObjects')
            ? DropLargeObjects.fromJson(
                json['dropLargeObjects']
                    as core.Map<core.String, core.dynamic>,
              )
            : null,
        excludeObjects: json.containsKey('excludeObjects')
            ? OracleRdbms.fromJson(
                json['excludeObjects'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        includeObjects: json.containsKey('includeObjects')
            ? OracleRdbms.fromJson(
                json['includeObjects'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        logMiner: json.containsKey('logMiner')
            ? LogMiner.fromJson(
                json['logMiner'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        maxConcurrentBackfillTasks:
            json['maxConcurrentBackfillTasks'] as core.int?,
        maxConcurrentCdcTasks: json['maxConcurrentCdcTasks'] as core.int?,
        streamLargeObjects: json.containsKey('streamLargeObjects')
            ? StreamLargeObjects.fromJson(
                json['streamLargeObjects']
                    as core.Map<core.String, core.dynamic>,
              )
            : null,
      );

  /// Serializes this configuration to a JSON map, omitting unset (null)
  /// fields.
  core.Map<core.String, core.dynamic> toJson() => <core.String, core.dynamic>{
    'binaryLogParser': ?binaryLogParser,
    'dropLargeObjects': ?dropLargeObjects,
    'excludeObjects': ?excludeObjects,
    'includeObjects': ?includeObjects,
    'logMiner': ?logMiner,
    'maxConcurrentBackfillTasks': ?maxConcurrentBackfillTasks,
    'maxConcurrentCdcTasks': ?maxConcurrentCdcTasks,
    'streamLargeObjects': ?streamLargeObjects,
  };
}
| |
| /// Oracle SSL configuration information. |
/// Oracle SSL configuration information.
class OracleSslConfig {
  /// Input only.
  ///
  /// PEM-encoded certificate of the CA that signed the source database server's
  /// certificate.
  core.String? caCertificate;

  /// Indicates whether the ca_certificate field has been set for this
  /// Connection-Profile.
  ///
  /// Output only.
  core.bool? caCertificateSet;

  /// The distinguished name (DN) mentioned in the server certificate.
  ///
  /// This corresponds to SSL_SERVER_CERT_DN sqlnet parameter. Refer
  /// https://docs.oracle.com/en/database/oracle/oracle-database/19/netrf/local-naming-parameters-in-tns-ora-file.html#GUID-70AB0695-A9AA-4A94-B141-4C605236EEB7
  /// If this field is not provided, the DN matching is not enforced.
  ///
  /// Optional.
  core.String? serverCertificateDistinguishedName;

  OracleSslConfig({
    this.caCertificate,
    this.caCertificateSet,
    this.serverCertificateDistinguishedName,
  });

  /// Deserializes an [OracleSslConfig] from its JSON map representation.
  OracleSslConfig.fromJson(core.Map json)
    : this(
        caCertificate: json['caCertificate'] as core.String?,
        caCertificateSet: json['caCertificateSet'] as core.bool?,
        serverCertificateDistinguishedName:
            json['serverCertificateDistinguishedName'] as core.String?,
      );

  /// Serializes this configuration to a JSON map, omitting unset (null)
  /// fields.
  core.Map<core.String, core.dynamic> toJson() => <core.String, core.dynamic>{
    'caCertificate': ?caCertificate,
    'caCertificateSet': ?caCertificateSet,
    'serverCertificateDistinguishedName': ?serverCertificateDistinguishedName,
  };
}
| |
| /// Oracle table. |
/// Oracle table.
class OracleTable {
  /// Oracle columns in the schema.
  ///
  /// When unspecified as part of include/exclude objects, includes/excludes
  /// everything.
  core.List<OracleColumn>? oracleColumns;

  /// The table name.
  core.String? table;

  OracleTable({this.oracleColumns, this.table});

  /// Deserializes an [OracleTable] from its JSON map representation.
  OracleTable.fromJson(core.Map json)
    : this(
        oracleColumns: (json['oracleColumns'] as core.List?)
            ?.map(
              (item) => OracleColumn.fromJson(
                item as core.Map<core.String, core.dynamic>,
              ),
            )
            .toList(),
        table: json['table'] as core.String?,
      );

  /// Serializes this table to a JSON map, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => <core.String, core.dynamic>{
    'oracleColumns': ?oracleColumns,
    'table': ?table,
  };
}
| |
| /// PostgreSQL Column. |
/// PostgreSQL Column.
class PostgresqlColumn {
  /// The column name.
  core.String? column;

  /// The PostgreSQL data type.
  core.String? dataType;

  /// Column length.
  core.int? length;

  /// Whether or not the column can accept a null value.
  core.bool? nullable;

  /// The ordinal position of the column in the table.
  core.int? ordinalPosition;

  /// Column precision.
  core.int? precision;

  /// Whether or not the column represents a primary key.
  core.bool? primaryKey;

  /// Column scale.
  core.int? scale;

  PostgresqlColumn({
    this.column,
    this.dataType,
    this.length,
    this.nullable,
    this.ordinalPosition,
    this.precision,
    this.primaryKey,
    this.scale,
  });

  /// Deserializes a [PostgresqlColumn] from its JSON map representation.
  PostgresqlColumn.fromJson(core.Map json)
    : this(
        column: json['column'] as core.String?,
        dataType: json['dataType'] as core.String?,
        length: json['length'] as core.int?,
        nullable: json['nullable'] as core.bool?,
        ordinalPosition: json['ordinalPosition'] as core.int?,
        precision: json['precision'] as core.int?,
        primaryKey: json['primaryKey'] as core.bool?,
        scale: json['scale'] as core.int?,
      );

  /// Serializes this column to a JSON map, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => <core.String, core.dynamic>{
    'column': ?column,
    'dataType': ?dataType,
    'length': ?length,
    'nullable': ?nullable,
    'ordinalPosition': ?ordinalPosition,
    'precision': ?precision,
    'primaryKey': ?primaryKey,
    'scale': ?scale,
  };
}
| |
/// PostgreSQL data source object identifier.
///
/// Alias of the shared [$ObjectIdentifier] message defined in `shared.dart`.
typedef PostgresqlObjectIdentifier = $ObjectIdentifier;
| |
| /// Profile for connecting to a PostgreSQL source. |
/// Profile for connecting to a PostgreSQL source.
class PostgresqlProfile {
  /// Database for the PostgreSQL connection.
  ///
  /// Required.
  core.String? database;

  /// Hostname for the PostgreSQL connection.
  ///
  /// Required.
  core.String? hostname;

  /// Password for the PostgreSQL connection.
  ///
  /// Mutually exclusive with the `secret_manager_stored_password` field.
  ///
  /// Optional.
  core.String? password;

  /// Port for the PostgreSQL connection, default value is 5432.
  core.int? port;

  /// A reference to a Secret Manager resource name storing the PostgreSQL
  /// connection password.
  ///
  /// Mutually exclusive with the `password` field.
  ///
  /// Optional.
  core.String? secretManagerStoredPassword;

  /// SSL configuration for the PostgreSQL connection.
  ///
  /// In case PostgresqlSslConfig is not set, the connection will use the
  /// default SSL mode, which is `prefer` (i.e. this mode will only use
  /// encryption if enabled from database side, otherwise will use unencrypted
  /// communication)
  ///
  /// Optional.
  PostgresqlSslConfig? sslConfig;

  /// Username for the PostgreSQL connection.
  ///
  /// Required.
  core.String? username;

  PostgresqlProfile({
    this.database,
    this.hostname,
    this.password,
    this.port,
    this.secretManagerStoredPassword,
    this.sslConfig,
    this.username,
  });

  /// Deserializes a [PostgresqlProfile] from its JSON map representation.
  ///
  /// The nested SSL config is only constructed when its key is present.
  PostgresqlProfile.fromJson(core.Map json)
    : this(
        database: json['database'] as core.String?,
        hostname: json['hostname'] as core.String?,
        password: json['password'] as core.String?,
        port: json['port'] as core.int?,
        secretManagerStoredPassword:
            json['secretManagerStoredPassword'] as core.String?,
        sslConfig: json.containsKey('sslConfig')
            ? PostgresqlSslConfig.fromJson(
                json['sslConfig'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        username: json['username'] as core.String?,
      );

  /// Serializes this profile to a JSON map, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => <core.String, core.dynamic>{
    'database': ?database,
    'hostname': ?hostname,
    'password': ?password,
    'port': ?port,
    'secretManagerStoredPassword': ?secretManagerStoredPassword,
    'sslConfig': ?sslConfig,
    'username': ?username,
  };
}
| |
| /// PostgreSQL database structure. |
/// PostgreSQL database structure.
class PostgresqlRdbms {
  /// PostgreSQL schemas in the database server.
  core.List<PostgresqlSchema>? postgresqlSchemas;

  PostgresqlRdbms({this.postgresqlSchemas});

  /// Deserializes a [PostgresqlRdbms] from its JSON map representation.
  PostgresqlRdbms.fromJson(core.Map json)
    : this(
        postgresqlSchemas: (json['postgresqlSchemas'] as core.List?)
            ?.map(
              (item) => PostgresqlSchema.fromJson(
                item as core.Map<core.String, core.dynamic>,
              ),
            )
            .toList(),
      );

  /// Serializes this structure to a JSON map, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => <core.String, core.dynamic>{
    'postgresqlSchemas': ?postgresqlSchemas,
  };
}
| |
| /// PostgreSQL schema. |
/// PostgreSQL schema.
class PostgresqlSchema {
  /// Tables in the schema.
  core.List<PostgresqlTable>? postgresqlTables;

  /// The schema name.
  core.String? schema;

  PostgresqlSchema({this.postgresqlTables, this.schema});

  /// Deserializes a [PostgresqlSchema] from its JSON map representation.
  PostgresqlSchema.fromJson(core.Map json)
    : this(
        postgresqlTables: (json['postgresqlTables'] as core.List?)
            ?.map(
              (item) => PostgresqlTable.fromJson(
                item as core.Map<core.String, core.dynamic>,
              ),
            )
            .toList(),
        schema: json['schema'] as core.String?,
      );

  /// Serializes this schema to a JSON map, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => <core.String, core.dynamic>{
    'postgresqlTables': ?postgresqlTables,
    'schema': ?schema,
  };
}
| |
| /// Configuration for syncing data from a PostgreSQL source. |
/// Configuration for syncing data from a PostgreSQL source.
class PostgresqlSourceConfig {
  /// The PostgreSQL objects to exclude from the stream.
  PostgresqlRdbms? excludeObjects;

  /// The PostgreSQL objects to include in the stream.
  PostgresqlRdbms? includeObjects;

  /// Maximum number of concurrent backfill tasks.
  ///
  /// The number should be non negative. If not set (or set to 0), the system's
  /// default value will be used.
  core.int? maxConcurrentBackfillTasks;

  /// The name of the publication that includes the set of all tables that are
  /// defined in the stream's include_objects.
  ///
  /// Required.
  core.String? publication;

  /// The name of the logical replication slot that's configured with the
  /// pgoutput plugin.
  ///
  /// Required. Immutable.
  core.String? replicationSlot;

  PostgresqlSourceConfig({
    this.excludeObjects,
    this.includeObjects,
    this.maxConcurrentBackfillTasks,
    this.publication,
    this.replicationSlot,
  });

  /// Deserializes a [PostgresqlSourceConfig] from its JSON map representation.
  ///
  /// Nested messages are only constructed when their key is present.
  PostgresqlSourceConfig.fromJson(core.Map json)
    : this(
        excludeObjects: json.containsKey('excludeObjects')
            ? PostgresqlRdbms.fromJson(
                json['excludeObjects'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        includeObjects: json.containsKey('includeObjects')
            ? PostgresqlRdbms.fromJson(
                json['includeObjects'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        maxConcurrentBackfillTasks:
            json['maxConcurrentBackfillTasks'] as core.int?,
        publication: json['publication'] as core.String?,
        replicationSlot: json['replicationSlot'] as core.String?,
      );

  /// Serializes this configuration to a JSON map, omitting unset (null)
  /// fields.
  core.Map<core.String, core.dynamic> toJson() => <core.String, core.dynamic>{
    'excludeObjects': ?excludeObjects,
    'includeObjects': ?includeObjects,
    'maxConcurrentBackfillTasks': ?maxConcurrentBackfillTasks,
    'publication': ?publication,
    'replicationSlot': ?replicationSlot,
  };
}
| |
| /// PostgreSQL SSL configuration information. |
/// PostgreSQL SSL configuration information.
class PostgresqlSslConfig {
  /// If this field is set, the communication will be encrypted with TLS
  /// encryption and both the server identity and the client identity will be
  /// authenticated.
  ServerAndClientVerification? serverAndClientVerification;

  /// If this field is set, the communication will be encrypted with TLS
  /// encryption and the server identity will be authenticated.
  ServerVerification? serverVerification;

  PostgresqlSslConfig({
    this.serverAndClientVerification,
    this.serverVerification,
  });

  /// Deserializes a [PostgresqlSslConfig] from its JSON map representation.
  ///
  /// Nested messages are only constructed when their key is present.
  PostgresqlSslConfig.fromJson(core.Map json)
    : this(
        serverAndClientVerification:
            json.containsKey('serverAndClientVerification')
            ? ServerAndClientVerification.fromJson(
                json['serverAndClientVerification']
                    as core.Map<core.String, core.dynamic>,
              )
            : null,
        serverVerification: json.containsKey('serverVerification')
            ? ServerVerification.fromJson(
                json['serverVerification']
                    as core.Map<core.String, core.dynamic>,
              )
            : null,
      );

  /// Serializes this configuration to a JSON map, omitting unset (null)
  /// fields.
  core.Map<core.String, core.dynamic> toJson() => <core.String, core.dynamic>{
    'serverAndClientVerification': ?serverAndClientVerification,
    'serverVerification': ?serverVerification,
  };
}
| |
| /// PostgreSQL table. |
/// PostgreSQL table.
class PostgresqlTable {
  /// PostgreSQL columns in the schema.
  ///
  /// When unspecified as part of include/exclude objects, includes/excludes
  /// everything.
  core.List<PostgresqlColumn>? postgresqlColumns;

  /// The table name.
  core.String? table;

  PostgresqlTable({this.postgresqlColumns, this.table});

  /// Deserializes a [PostgresqlTable] from its JSON map representation.
  PostgresqlTable.fromJson(core.Map json)
    : this(
        postgresqlColumns: (json['postgresqlColumns'] as core.List?)
            ?.map(
              (item) => PostgresqlColumn.fromJson(
                item as core.Map<core.String, core.dynamic>,
              ),
            )
            .toList(),
        table: json['table'] as core.String?,
      );

  /// Serializes this table to a JSON map, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => <core.String, core.dynamic>{
    'postgresqlColumns': ?postgresqlColumns,
    'table': ?table,
  };
}
| |
| /// The PrivateConnection resource is used to establish private connectivity |
| /// between Datastream and a customer's network. |
/// The PrivateConnection resource is used to establish private connectivity
/// between Datastream and a customer's network.
class PrivateConnection {
  /// The create time of the resource.
  ///
  /// Output only.
  core.String? createTime;

  /// Display name.
  ///
  /// Required.
  core.String? displayName;

  /// In case of error, the details of the error in a user-friendly format.
  ///
  /// Output only.
  Error? error;

  /// Labels.
  core.Map<core.String, core.String>? labels;

  /// Identifier.
  ///
  /// The resource's name.
  ///
  /// Output only.
  core.String? name;

  /// PSC Interface Config.
  PscInterfaceConfig? pscInterfaceConfig;

  /// Reserved for future use.
  ///
  /// Output only.
  core.bool? satisfiesPzi;

  /// Reserved for future use.
  ///
  /// Output only.
  core.bool? satisfiesPzs;

  /// The state of the Private Connection.
  ///
  /// Output only.
  /// Possible string values are:
  /// - "STATE_UNSPECIFIED" : Unspecified state.
  /// - "CREATING" : The private connection is in creation state - creating
  /// resources.
  /// - "CREATED" : The private connection has been created with all of its
  /// resources.
  /// - "FAILED" : The private connection creation has failed.
  /// - "DELETING" : The private connection is being deleted.
  /// - "FAILED_TO_DELETE" : Delete request has failed, resource is in invalid
  /// state.
  core.String? state;

  /// The update time of the resource.
  ///
  /// Output only.
  core.String? updateTime;

  /// VPC Peering Config.
  VpcPeeringConfig? vpcPeeringConfig;

  PrivateConnection({
    this.createTime,
    this.displayName,
    this.error,
    this.labels,
    this.name,
    this.pscInterfaceConfig,
    this.satisfiesPzi,
    this.satisfiesPzs,
    this.state,
    this.updateTime,
    this.vpcPeeringConfig,
  });

  /// Deserializes a [PrivateConnection] from its JSON map representation.
  ///
  /// Nested messages are only constructed when their key is present.
  PrivateConnection.fromJson(core.Map json)
    : this(
        createTime: json['createTime'] as core.String?,
        displayName: json['displayName'] as core.String?,
        error: json.containsKey('error')
            ? Error.fromJson(
                json['error'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        labels: (json['labels'] as core.Map<core.String, core.dynamic>?)?.map(
          (k, v) => core.MapEntry(k, v as core.String),
        ),
        name: json['name'] as core.String?,
        pscInterfaceConfig: json.containsKey('pscInterfaceConfig')
            ? PscInterfaceConfig.fromJson(
                json['pscInterfaceConfig']
                    as core.Map<core.String, core.dynamic>,
              )
            : null,
        satisfiesPzi: json['satisfiesPzi'] as core.bool?,
        satisfiesPzs: json['satisfiesPzs'] as core.bool?,
        state: json['state'] as core.String?,
        updateTime: json['updateTime'] as core.String?,
        vpcPeeringConfig: json.containsKey('vpcPeeringConfig')
            ? VpcPeeringConfig.fromJson(
                json['vpcPeeringConfig']
                    as core.Map<core.String, core.dynamic>,
              )
            : null,
      );

  /// Serializes this resource to a JSON map, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => <core.String, core.dynamic>{
    'createTime': ?createTime,
    'displayName': ?displayName,
    'error': ?error,
    'labels': ?labels,
    'name': ?name,
    'pscInterfaceConfig': ?pscInterfaceConfig,
    'satisfiesPzi': ?satisfiesPzi,
    'satisfiesPzs': ?satisfiesPzs,
    'state': ?state,
    'updateTime': ?updateTime,
    'vpcPeeringConfig': ?vpcPeeringConfig,
  };
}
| |
| /// Private Connectivity |
/// Private Connectivity
class PrivateConnectivity {
  /// A reference to a private connection resource.
  ///
  /// Format:
  /// `projects/{project}/locations/{location}/privateConnections/{name}`
  ///
  /// Required.
  core.String? privateConnection;

  PrivateConnectivity({this.privateConnection});

  /// Deserializes a [PrivateConnectivity] from its JSON map representation.
  PrivateConnectivity.fromJson(core.Map json)
    : this(privateConnection: json['privateConnection'] as core.String?);

  /// Serializes this message to a JSON map, omitting an unset (null)
  /// connection reference.
  core.Map<core.String, core.dynamic> toJson() => <core.String, core.dynamic>{
    'privateConnection': ?privateConnection,
  };
}
| |
| /// The PSC Interface configuration is used to create PSC Interface between |
| /// Datastream and the consumer's PSC. |
/// The PSC Interface configuration is used to create PSC Interface between
/// Datastream and the consumer's PSC.
class PscInterfaceConfig {
  /// Fully qualified name of the Network Attachment that Datastream will
  /// connect to.
  ///
  /// Format: `projects/{project}/regions/{region}/networkAttachments/{name}`
  ///
  /// Required.
  core.String? networkAttachment;

  PscInterfaceConfig({this.networkAttachment});

  /// Deserializes a [PscInterfaceConfig] from its JSON map representation.
  PscInterfaceConfig.fromJson(core.Map json)
    : this(networkAttachment: json['networkAttachment'] as core.String?);

  /// Serializes this configuration to a JSON map, omitting an unset (null)
  /// attachment.
  core.Map<core.String, core.dynamic> toJson() => <core.String, core.dynamic>{
    'networkAttachment': ?networkAttachment,
  };
}
| |
| /// The route resource is the child of the private connection resource, used for |
| /// defining a route for a private connection. |
/// The route resource is the child of the private connection resource, used for
/// defining a route for a private connection.
class Route {
  /// The create time of the resource.
  ///
  /// Output only.
  core.String? createTime;

  /// Destination address for connection
  ///
  /// Required.
  core.String? destinationAddress;

  /// Destination port for connection
  core.int? destinationPort;

  /// Display name.
  ///
  /// Required.
  core.String? displayName;

  /// Labels.
  core.Map<core.String, core.String>? labels;

  /// Identifier.
  ///
  /// The resource's name.
  ///
  /// Output only.
  core.String? name;

  /// The update time of the resource.
  ///
  /// Output only.
  core.String? updateTime;

  Route({
    this.createTime,
    this.destinationAddress,
    this.destinationPort,
    this.displayName,
    this.labels,
    this.name,
    this.updateTime,
  });

  /// Deserializes a [Route] from its JSON map representation.
  Route.fromJson(core.Map json)
    : this(
        createTime: json['createTime'] as core.String?,
        destinationAddress: json['destinationAddress'] as core.String?,
        destinationPort: json['destinationPort'] as core.int?,
        displayName: json['displayName'] as core.String?,
        labels: (json['labels'] as core.Map<core.String, core.dynamic>?)?.map(
          (k, v) => core.MapEntry(k, v as core.String),
        ),
        name: json['name'] as core.String?,
        updateTime: json['updateTime'] as core.String?,
      );

  /// Serializes this route to a JSON map, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => <core.String, core.dynamic>{
    'createTime': ?createTime,
    'destinationAddress': ?destinationAddress,
    'destinationPort': ?destinationPort,
    'displayName': ?displayName,
    'labels': ?labels,
    'name': ?name,
    'updateTime': ?updateTime,
  };
}
| |
| /// A set of rules to apply to a set of objects. |
/// A set of rules to apply to a set of objects.
class RuleSet {
  /// List of customization rules to apply.
  ///
  /// Required.
  core.List<CustomizationRule>? customizationRules;

  /// Object filter to apply the customization rules to.
  ///
  /// Required.
  ObjectFilter? objectFilter;

  RuleSet({this.customizationRules, this.objectFilter});

  /// Deserializes a [RuleSet] from its JSON map representation.
  ///
  /// The nested filter is only constructed when its key is present.
  RuleSet.fromJson(core.Map json)
    : this(
        customizationRules: (json['customizationRules'] as core.List?)
            ?.map(
              (item) => CustomizationRule.fromJson(
                item as core.Map<core.String, core.dynamic>,
              ),
            )
            .toList(),
        objectFilter: json.containsKey('objectFilter')
            ? ObjectFilter.fromJson(
                json['objectFilter'] as core.Map<core.String, core.dynamic>,
              )
            : null,
      );

  /// Serializes this rule set to a JSON map, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => <core.String, core.dynamic>{
    'customizationRules': ?customizationRules,
    'objectFilter': ?objectFilter,
  };
}
| |
/// Request message for running a stream.
class RunStreamRequest {
  /// The CDC strategy of the stream.
  ///
  /// If not set, the system's default value will be used.
  ///
  /// Optional.
  CdcStrategy? cdcStrategy;

  /// Update the stream without validating it.
  ///
  /// Optional.
  core.bool? force;

  RunStreamRequest({this.cdcStrategy, this.force});

  /// Deserializes a [RunStreamRequest] from its JSON wire representation.
  RunStreamRequest.fromJson(core.Map raw)
    : this(
        cdcStrategy: raw.containsKey('cdcStrategy')
            ? CdcStrategy.fromJson(
                raw['cdcStrategy'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        force: raw['force'] as core.bool?,
      );

  /// Serializes this message to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    if (cdcStrategy != null) 'cdcStrategy': cdcStrategy,
    if (force != null) 'force': force,
  };
}
| |
/// Salesforce field.
class SalesforceField {
  /// The data type.
  core.String? dataType;

  /// The field name.
  core.String? name;

  /// Indicates whether the field can accept nil values.
  core.bool? nillable;

  SalesforceField({this.dataType, this.name, this.nillable});

  /// Deserializes a [SalesforceField] from its JSON wire representation.
  SalesforceField.fromJson(core.Map raw)
    : this(
        dataType: raw['dataType'] as core.String?,
        name: raw['name'] as core.String?,
        nillable: raw['nillable'] as core.bool?,
      );

  /// Serializes this message to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    if (dataType != null) 'dataType': dataType,
    if (name != null) 'name': name,
    if (nillable != null) 'nillable': nillable,
  };
}
| |
/// Salesforce object.
class SalesforceObject {
  /// Salesforce fields.
  ///
  /// When unspecified as part of include objects, includes everything, when
  /// unspecified as part of exclude objects, excludes nothing.
  core.List<SalesforceField>? fields;

  /// The object name.
  core.String? objectName;

  SalesforceObject({this.fields, this.objectName});

  /// Deserializes a [SalesforceObject] from its JSON wire representation.
  SalesforceObject.fromJson(core.Map raw)
    : this(
        fields: (raw['fields'] as core.List?)
            ?.map(
              (e) => SalesforceField.fromJson(
                e as core.Map<core.String, core.dynamic>,
              ),
            )
            .toList(),
        objectName: raw['objectName'] as core.String?,
      );

  /// Serializes this message to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    if (fields != null) 'fields': fields,
    if (objectName != null) 'objectName': objectName,
  };
}
| |
/// Salesforce data source object identifier.
class SalesforceObjectIdentifier {
  /// The object name.
  ///
  /// Required.
  core.String? objectName;

  SalesforceObjectIdentifier({this.objectName});

  /// Deserializes a [SalesforceObjectIdentifier] from its JSON representation.
  SalesforceObjectIdentifier.fromJson(core.Map raw)
    : this(objectName: raw['objectName'] as core.String?);

  /// Serializes this message to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    if (objectName != null) 'objectName': objectName,
  };
}
| |
/// Salesforce organization structure.
class SalesforceOrg {
  /// Salesforce objects in the database server.
  core.List<SalesforceObject>? objects;

  SalesforceOrg({this.objects});

  /// Deserializes a [SalesforceOrg] from its JSON wire representation.
  SalesforceOrg.fromJson(core.Map raw)
    : this(
        objects: (raw['objects'] as core.List?)
            ?.map(
              (e) => SalesforceObject.fromJson(
                e as core.Map<core.String, core.dynamic>,
              ),
            )
            .toList(),
      );

  /// Serializes this message to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    if (objects != null) 'objects': objects,
  };
}
| |
/// Profile for connecting to a Salesforce source.
class SalesforceProfile {
  /// Domain endpoint for the Salesforce connection.
  ///
  /// Required.
  core.String? domain;

  /// Connected app authentication.
  Oauth2ClientCredentials? oauth2ClientCredentials;

  /// User-password authentication.
  UserCredentials? userCredentials;

  SalesforceProfile({
    this.domain,
    this.oauth2ClientCredentials,
    this.userCredentials,
  });

  /// Deserializes a [SalesforceProfile] from its JSON wire representation.
  SalesforceProfile.fromJson(core.Map raw)
    : this(
        domain: raw['domain'] as core.String?,
        oauth2ClientCredentials: raw.containsKey('oauth2ClientCredentials')
            ? Oauth2ClientCredentials.fromJson(
                raw['oauth2ClientCredentials']
                    as core.Map<core.String, core.dynamic>,
              )
            : null,
        userCredentials: raw.containsKey('userCredentials')
            ? UserCredentials.fromJson(
                raw['userCredentials'] as core.Map<core.String, core.dynamic>,
              )
            : null,
      );

  /// Serializes this message to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    if (domain != null) 'domain': domain,
    if (oauth2ClientCredentials != null)
      'oauth2ClientCredentials': oauth2ClientCredentials,
    if (userCredentials != null) 'userCredentials': userCredentials,
  };
}
| |
/// Configuration for syncing data from a Salesforce source.
class SalesforceSourceConfig {
  /// The Salesforce objects to exclude from the stream.
  SalesforceOrg? excludeObjects;

  /// The Salesforce objects to retrieve from the source.
  SalesforceOrg? includeObjects;

  /// Salesforce objects polling interval.
  ///
  /// The interval at which new changes will be polled for each object. The
  /// duration must be from `5 minutes` to `24 hours`, inclusive.
  ///
  /// Required.
  core.String? pollingInterval;

  SalesforceSourceConfig({
    this.excludeObjects,
    this.includeObjects,
    this.pollingInterval,
  });

  /// Deserializes a [SalesforceSourceConfig] from its JSON representation.
  SalesforceSourceConfig.fromJson(core.Map raw)
    : this(
        excludeObjects: raw.containsKey('excludeObjects')
            ? SalesforceOrg.fromJson(
                raw['excludeObjects'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        includeObjects: raw.containsKey('includeObjects')
            ? SalesforceOrg.fromJson(
                raw['includeObjects'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        pollingInterval: raw['pollingInterval'] as core.String?,
      );

  /// Serializes this message to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    if (excludeObjects != null) 'excludeObjects': excludeObjects,
    if (includeObjects != null) 'includeObjects': includeObjects,
    if (pollingInterval != null) 'pollingInterval': pollingInterval,
  };
}
| |
/// Message represents the option where Datastream will enforce the encryption
/// and authenticate the server identity as well as the client identity.
///
/// ca_certificate, client_certificate and client_key must be set if user
/// selects this option.
class ServerAndClientVerification {
  /// Input only.
  ///
  /// PEM-encoded server root CA certificate.
  ///
  /// Required.
  core.String? caCertificate;

  /// Input only.
  ///
  /// PEM-encoded certificate used by the source database to authenticate the
  /// client identity (i.e., the Datastream's identity). This certificate is
  /// signed by either a root certificate trusted by the server or one or more
  /// intermediate certificates (which is stored with the leaf certificate) to
  /// link the this certificate to the trusted root certificate.
  ///
  /// Required.
  core.String? clientCertificate;

  /// Input only.
  ///
  /// PEM-encoded private key associated with the client certificate. This value
  /// will be used during the SSL/TLS handshake, allowing the PostgreSQL server
  /// to authenticate the client's identity, i.e. identity of the Datastream.
  ///
  /// Optional.
  core.String? clientKey;

  /// The hostname mentioned in the Subject or SAN extension of the server
  /// certificate.
  ///
  /// If this field is not provided, the hostname in the server certificate is
  /// not validated.
  ///
  /// Optional.
  core.String? serverCertificateHostname;

  ServerAndClientVerification({
    this.caCertificate,
    this.clientCertificate,
    this.clientKey,
    this.serverCertificateHostname,
  });

  /// Deserializes a [ServerAndClientVerification] from its JSON
  /// representation.
  ServerAndClientVerification.fromJson(core.Map raw)
    : this(
        caCertificate: raw['caCertificate'] as core.String?,
        clientCertificate: raw['clientCertificate'] as core.String?,
        clientKey: raw['clientKey'] as core.String?,
        serverCertificateHostname:
            raw['serverCertificateHostname'] as core.String?,
      );

  /// Serializes this message to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    if (caCertificate != null) 'caCertificate': caCertificate,
    if (clientCertificate != null) 'clientCertificate': clientCertificate,
    if (clientKey != null) 'clientKey': clientKey,
    if (serverCertificateHostname != null)
      'serverCertificateHostname': serverCertificateHostname,
  };
}
| |
/// Message represents the option where Datastream will enforce the encryption
/// and authenticate the server identity.
///
/// ca_certificate must be set if user selects this option.
class ServerVerification {
  /// Input only.
  ///
  /// PEM-encoded server root CA certificate.
  ///
  /// Required.
  core.String? caCertificate;

  /// The hostname mentioned in the Subject or SAN extension of the server
  /// certificate.
  ///
  /// If this field is not provided, the hostname in the server certificate is
  /// not validated.
  ///
  /// Optional.
  core.String? serverCertificateHostname;

  ServerVerification({this.caCertificate, this.serverCertificateHostname});

  /// Deserializes a [ServerVerification] from its JSON wire representation.
  ServerVerification.fromJson(core.Map raw)
    : this(
        caCertificate: raw['caCertificate'] as core.String?,
        serverCertificateHostname:
            raw['serverCertificateHostname'] as core.String?,
      );

  /// Serializes this message to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    if (caCertificate != null) 'caCertificate': caCertificate,
    if (serverCertificateHostname != null)
      'serverCertificateHostname': serverCertificateHostname,
  };
}
| |
/// A single target dataset to which all data will be streamed.
class SingleTargetDataset {
  /// The dataset ID of the target dataset.
  ///
  /// DatasetIds allowed characters:
  /// https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets#datasetreference.
  core.String? datasetId;

  SingleTargetDataset({this.datasetId});

  /// Deserializes a [SingleTargetDataset] from its JSON wire representation.
  SingleTargetDataset.fromJson(core.Map raw)
    : this(datasetId: raw['datasetId'] as core.String?);

  /// Serializes this message to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    if (datasetId != null) 'datasetId': datasetId,
  };
}
| |
/// The configuration of the stream source.
class SourceConfig {
  /// MongoDB data source configuration.
  MongodbSourceConfig? mongodbSourceConfig;

  /// MySQL data source configuration.
  MysqlSourceConfig? mysqlSourceConfig;

  /// Oracle data source configuration.
  OracleSourceConfig? oracleSourceConfig;

  /// PostgreSQL data source configuration.
  PostgresqlSourceConfig? postgresqlSourceConfig;

  /// Salesforce data source configuration.
  SalesforceSourceConfig? salesforceSourceConfig;

  /// Source connection profile resource.
  ///
  /// Format:
  /// `projects/{project}/locations/{location}/connectionProfiles/{name}`
  ///
  /// Required.
  core.String? sourceConnectionProfile;

  /// Spanner data source configuration.
  SpannerSourceConfig? spannerSourceConfig;

  /// SQLServer data source configuration.
  SqlServerSourceConfig? sqlServerSourceConfig;

  SourceConfig({
    this.mongodbSourceConfig,
    this.mysqlSourceConfig,
    this.oracleSourceConfig,
    this.postgresqlSourceConfig,
    this.salesforceSourceConfig,
    this.sourceConnectionProfile,
    this.spannerSourceConfig,
    this.sqlServerSourceConfig,
  });

  /// Decodes the nested message stored under [key] with [decode], or returns
  /// `null` when [key] is absent from [raw].
  static T? _sub<T>(
    core.Map raw,
    core.String key,
    T Function(core.Map<core.String, core.dynamic>) decode,
  ) => raw.containsKey(key)
      ? decode(raw[key] as core.Map<core.String, core.dynamic>)
      : null;

  /// Deserializes a [SourceConfig] from its JSON wire representation.
  SourceConfig.fromJson(core.Map raw)
    : this(
        mongodbSourceConfig: _sub(
          raw,
          'mongodbSourceConfig',
          MongodbSourceConfig.fromJson,
        ),
        mysqlSourceConfig: _sub(
          raw,
          'mysqlSourceConfig',
          MysqlSourceConfig.fromJson,
        ),
        oracleSourceConfig: _sub(
          raw,
          'oracleSourceConfig',
          OracleSourceConfig.fromJson,
        ),
        postgresqlSourceConfig: _sub(
          raw,
          'postgresqlSourceConfig',
          PostgresqlSourceConfig.fromJson,
        ),
        salesforceSourceConfig: _sub(
          raw,
          'salesforceSourceConfig',
          SalesforceSourceConfig.fromJson,
        ),
        sourceConnectionProfile: raw['sourceConnectionProfile'] as core.String?,
        spannerSourceConfig: _sub(
          raw,
          'spannerSourceConfig',
          SpannerSourceConfig.fromJson,
        ),
        sqlServerSourceConfig: _sub(
          raw,
          'sqlServerSourceConfig',
          SqlServerSourceConfig.fromJson,
        ),
      );

  /// Serializes this message to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    if (mongodbSourceConfig != null) 'mongodbSourceConfig': mongodbSourceConfig,
    if (mysqlSourceConfig != null) 'mysqlSourceConfig': mysqlSourceConfig,
    if (oracleSourceConfig != null) 'oracleSourceConfig': oracleSourceConfig,
    if (postgresqlSourceConfig != null)
      'postgresqlSourceConfig': postgresqlSourceConfig,
    if (salesforceSourceConfig != null)
      'salesforceSourceConfig': salesforceSourceConfig,
    if (sourceConnectionProfile != null)
      'sourceConnectionProfile': sourceConnectionProfile,
    if (spannerSourceConfig != null) 'spannerSourceConfig': spannerSourceConfig,
    if (sqlServerSourceConfig != null)
      'sqlServerSourceConfig': sqlServerSourceConfig,
  };
}
| |
/// Destination datasets are created so that hierarchy of the destination data
/// objects matches the source hierarchy.
class SourceHierarchyDatasets {
  /// The dataset template to use for dynamic dataset creation.
  DatasetTemplate? datasetTemplate;

  /// The project id of the BigQuery dataset.
  ///
  /// If not specified, the project will be inferred from the stream resource.
  ///
  /// Optional.
  core.String? projectId;

  SourceHierarchyDatasets({this.datasetTemplate, this.projectId});

  /// Deserializes a [SourceHierarchyDatasets] from its JSON representation.
  SourceHierarchyDatasets.fromJson(core.Map raw)
    : this(
        datasetTemplate: raw.containsKey('datasetTemplate')
            ? DatasetTemplate.fromJson(
                raw['datasetTemplate'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        projectId: raw['projectId'] as core.String?,
      );

  /// Serializes this message to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    if (datasetTemplate != null) 'datasetTemplate': datasetTemplate,
    if (projectId != null) 'projectId': projectId,
  };
}
| |
/// Represents an identifier of an object in the data source.
class SourceObjectIdentifier {
  /// MongoDB data source object identifier.
  MongodbObjectIdentifier? mongodbIdentifier;

  /// Mysql data source object identifier.
  MysqlObjectIdentifier? mysqlIdentifier;

  /// Oracle data source object identifier.
  OracleObjectIdentifier? oracleIdentifier;

  /// PostgreSQL data source object identifier.
  PostgresqlObjectIdentifier? postgresqlIdentifier;

  /// Salesforce data source object identifier.
  SalesforceObjectIdentifier? salesforceIdentifier;

  /// Spanner data source object identifier.
  SpannerObjectIdentifier? spannerIdentifier;

  /// SQLServer data source object identifier.
  SqlServerObjectIdentifier? sqlServerIdentifier;

  SourceObjectIdentifier({
    this.mongodbIdentifier,
    this.mysqlIdentifier,
    this.oracleIdentifier,
    this.postgresqlIdentifier,
    this.salesforceIdentifier,
    this.spannerIdentifier,
    this.sqlServerIdentifier,
  });

  /// Decodes the nested message stored under [key] with [decode], or returns
  /// `null` when [key] is absent from [raw].
  static T? _sub<T>(
    core.Map raw,
    core.String key,
    T Function(core.Map<core.String, core.dynamic>) decode,
  ) => raw.containsKey(key)
      ? decode(raw[key] as core.Map<core.String, core.dynamic>)
      : null;

  /// Deserializes a [SourceObjectIdentifier] from its JSON representation.
  SourceObjectIdentifier.fromJson(core.Map raw)
    : this(
        mongodbIdentifier: _sub(
          raw,
          'mongodbIdentifier',
          MongodbObjectIdentifier.fromJson,
        ),
        mysqlIdentifier: _sub(
          raw,
          'mysqlIdentifier',
          MysqlObjectIdentifier.fromJson,
        ),
        oracleIdentifier: _sub(
          raw,
          'oracleIdentifier',
          OracleObjectIdentifier.fromJson,
        ),
        postgresqlIdentifier: _sub(
          raw,
          'postgresqlIdentifier',
          PostgresqlObjectIdentifier.fromJson,
        ),
        salesforceIdentifier: _sub(
          raw,
          'salesforceIdentifier',
          SalesforceObjectIdentifier.fromJson,
        ),
        spannerIdentifier: _sub(
          raw,
          'spannerIdentifier',
          SpannerObjectIdentifier.fromJson,
        ),
        sqlServerIdentifier: _sub(
          raw,
          'sqlServerIdentifier',
          SqlServerObjectIdentifier.fromJson,
        ),
      );

  /// Serializes this message to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    if (mongodbIdentifier != null) 'mongodbIdentifier': mongodbIdentifier,
    if (mysqlIdentifier != null) 'mysqlIdentifier': mysqlIdentifier,
    if (oracleIdentifier != null) 'oracleIdentifier': oracleIdentifier,
    if (postgresqlIdentifier != null)
      'postgresqlIdentifier': postgresqlIdentifier,
    if (salesforceIdentifier != null)
      'salesforceIdentifier': salesforceIdentifier,
    if (spannerIdentifier != null) 'spannerIdentifier': spannerIdentifier,
    if (sqlServerIdentifier != null) 'sqlServerIdentifier': sqlServerIdentifier,
  };
}
| |
/// Spanner column.
class SpannerColumn {
  /// The column name.
  ///
  /// Required.
  core.String? column;

  /// Spanner data type.
  ///
  /// Optional.
  core.String? dataType;

  /// Whether or not the column is a primary key.
  ///
  /// Optional.
  core.bool? isPrimaryKey;

  /// The ordinal position of the column in the table.
  ///
  /// Optional.
  core.String? ordinalPosition;

  SpannerColumn({
    this.column,
    this.dataType,
    this.isPrimaryKey,
    this.ordinalPosition,
  });

  /// Deserializes a [SpannerColumn] from its JSON wire representation.
  SpannerColumn.fromJson(core.Map raw)
    : this(
        column: raw['column'] as core.String?,
        dataType: raw['dataType'] as core.String?,
        isPrimaryKey: raw['isPrimaryKey'] as core.bool?,
        ordinalPosition: raw['ordinalPosition'] as core.String?,
      );

  /// Serializes this message to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    if (column != null) 'column': column,
    if (dataType != null) 'dataType': dataType,
    if (isPrimaryKey != null) 'isPrimaryKey': isPrimaryKey,
    if (ordinalPosition != null) 'ordinalPosition': ordinalPosition,
  };
}
| |
/// Spanner database structure.
class SpannerDatabase {
  /// Spanner schemas in the database.
  ///
  /// Optional.
  core.List<SpannerSchema>? schemas;

  SpannerDatabase({this.schemas});

  /// Deserializes a [SpannerDatabase] from its JSON wire representation.
  SpannerDatabase.fromJson(core.Map raw)
    : this(
        schemas: (raw['schemas'] as core.List?)
            ?.map(
              (e) => SpannerSchema.fromJson(
                e as core.Map<core.String, core.dynamic>,
              ),
            )
            .toList(),
      );

  /// Serializes this message to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    if (schemas != null) 'schemas': schemas,
  };
}
| |
/// Spanner data source object identifier.
class SpannerObjectIdentifier {
  /// The schema name.
  ///
  /// Optional.
  core.String? schema;

  /// The table name.
  ///
  /// Required.
  core.String? table;

  SpannerObjectIdentifier({this.schema, this.table});

  /// Deserializes a [SpannerObjectIdentifier] from its JSON representation.
  SpannerObjectIdentifier.fromJson(core.Map raw)
    : this(
        schema: raw['schema'] as core.String?,
        table: raw['table'] as core.String?,
      );

  /// Serializes this message to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    if (schema != null) 'schema': schema,
    if (table != null) 'table': table,
  };
}
| |
/// Profile for connecting to a Spanner source.
class SpannerProfile {
  /// Cloud Spanner database resource.
  ///
  /// This field is immutable. Must be in the format:
  /// projects/{project}/instances/{instance}/databases/{database_id}.
  ///
  /// Required. Immutable.
  core.String? database;

  /// The Spanner endpoint to connect to.
  ///
  /// Defaults to the global endpoint (https://spanner.googleapis.com). Must be
  /// in the format: https://spanner.{region}.rep.googleapis.com.
  ///
  /// Optional.
  core.String? host;

  SpannerProfile({this.database, this.host});

  /// Deserializes a [SpannerProfile] from its JSON wire representation.
  SpannerProfile.fromJson(core.Map raw)
    : this(
        database: raw['database'] as core.String?,
        host: raw['host'] as core.String?,
      );

  /// Serializes this message to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    if (database != null) 'database': database,
    if (host != null) 'host': host,
  };
}
| |
/// Spanner schema.
class SpannerSchema {
  /// The schema name.
  ///
  /// Required.
  core.String? schema;

  /// Spanner tables in the schema.
  ///
  /// Optional.
  core.List<SpannerTable>? tables;

  SpannerSchema({this.schema, this.tables});

  /// Deserializes a [SpannerSchema] from its JSON wire representation.
  SpannerSchema.fromJson(core.Map raw)
    : this(
        schema: raw['schema'] as core.String?,
        tables: (raw['tables'] as core.List?)
            ?.map(
              (e) => SpannerTable.fromJson(
                e as core.Map<core.String, core.dynamic>,
              ),
            )
            .toList(),
      );

  /// Serializes this message to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    if (schema != null) 'schema': schema,
    if (tables != null) 'tables': tables,
  };
}
| |
/// Configuration for syncing data from a Spanner source.
class SpannerSourceConfig {
  /// Whether to use Data Boost for Spanner backfills.
  ///
  /// Defaults to false if not set.
  ///
  /// Optional.
  core.bool? backfillDataBoostEnabled;

  /// The change stream name to use for the stream.
  ///
  /// Required. Immutable.
  core.String? changeStreamName;

  /// The Spanner objects to avoid retrieving.
  ///
  /// If some objects are both included and excluded, an error will be thrown.
  ///
  /// Optional.
  SpannerDatabase? excludeObjects;

  /// The FGAC role to use for the stream.
  ///
  /// Optional.
  core.String? fgacRole;

  /// The Spanner objects to retrieve from the data source.
  ///
  /// If some objects are both included and excluded, an error will be thrown.
  ///
  /// Optional.
  SpannerDatabase? includeObjects;

  /// Maximum number of concurrent backfill tasks.
  ///
  /// Optional.
  core.int? maxConcurrentBackfillTasks;

  /// Maximum number of concurrent CDC tasks.
  ///
  /// Optional.
  core.int? maxConcurrentCdcTasks;

  /// The RPC priority to use for the stream.
  ///
  /// Optional.
  /// Possible string values are:
  /// - "SPANNER_RPC_PRIORITY_UNSPECIFIED" : Unspecified RPC priority.
  /// - "LOW" : Low RPC priority.
  /// - "MEDIUM" : Medium RPC priority.
  /// - "HIGH" : High RPC priority.
  core.String? spannerRpcPriority;

  SpannerSourceConfig({
    this.backfillDataBoostEnabled,
    this.changeStreamName,
    this.excludeObjects,
    this.fgacRole,
    this.includeObjects,
    this.maxConcurrentBackfillTasks,
    this.maxConcurrentCdcTasks,
    this.spannerRpcPriority,
  });

  /// Deserializes a [SpannerSourceConfig] from its JSON wire representation.
  SpannerSourceConfig.fromJson(core.Map raw)
    : this(
        backfillDataBoostEnabled: raw['backfillDataBoostEnabled'] as core.bool?,
        changeStreamName: raw['changeStreamName'] as core.String?,
        excludeObjects: raw.containsKey('excludeObjects')
            ? SpannerDatabase.fromJson(
                raw['excludeObjects'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        fgacRole: raw['fgacRole'] as core.String?,
        includeObjects: raw.containsKey('includeObjects')
            ? SpannerDatabase.fromJson(
                raw['includeObjects'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        maxConcurrentBackfillTasks:
            raw['maxConcurrentBackfillTasks'] as core.int?,
        maxConcurrentCdcTasks: raw['maxConcurrentCdcTasks'] as core.int?,
        spannerRpcPriority: raw['spannerRpcPriority'] as core.String?,
      );

  /// Serializes this message to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    if (backfillDataBoostEnabled != null)
      'backfillDataBoostEnabled': backfillDataBoostEnabled,
    if (changeStreamName != null) 'changeStreamName': changeStreamName,
    if (excludeObjects != null) 'excludeObjects': excludeObjects,
    if (fgacRole != null) 'fgacRole': fgacRole,
    if (includeObjects != null) 'includeObjects': includeObjects,
    if (maxConcurrentBackfillTasks != null)
      'maxConcurrentBackfillTasks': maxConcurrentBackfillTasks,
    if (maxConcurrentCdcTasks != null)
      'maxConcurrentCdcTasks': maxConcurrentCdcTasks,
    if (spannerRpcPriority != null) 'spannerRpcPriority': spannerRpcPriority,
  };
}
| |
/// Spanner table.
class SpannerTable {
  /// Spanner columns in the table.
  ///
  /// Optional.
  core.List<SpannerColumn>? columns;

  /// The table name.
  ///
  /// Required.
  core.String? table;

  SpannerTable({this.columns, this.table});

  /// Deserializes a [SpannerTable] from its JSON wire representation.
  SpannerTable.fromJson(core.Map raw)
    : this(
        columns: (raw['columns'] as core.List?)
            ?.map(
              (e) => SpannerColumn.fromJson(
                e as core.Map<core.String, core.dynamic>,
              ),
            )
            .toList(),
        table: raw['table'] as core.String?,
      );

  /// Serializes this message to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    if (columns != null) 'columns': columns,
    if (table != null) 'table': table,
  };
}
| |
/// CDC strategy to start replicating from a specific position in the source.
class SpecificStartPosition {
  /// MongoDB change stream position to start replicating from.
  MongodbChangeStreamPosition? mongodbChangeStreamPosition;

  /// MySQL GTID set to start replicating from.
  MysqlGtidPosition? mysqlGtidPosition;

  /// MySQL specific log position to start replicating from.
  MysqlLogPosition? mysqlLogPosition;

  /// Oracle SCN to start replicating from.
  OracleScnPosition? oracleScnPosition;

  /// SqlServer LSN to start replicating from.
  SqlServerLsnPosition? sqlServerLsnPosition;

  SpecificStartPosition({
    this.mongodbChangeStreamPosition,
    this.mysqlGtidPosition,
    this.mysqlLogPosition,
    this.oracleScnPosition,
    this.sqlServerLsnPosition,
  });

  /// Decodes the nested message stored under [key] with [decode], or returns
  /// `null` when [key] is absent from [raw].
  static T? _sub<T>(
    core.Map raw,
    core.String key,
    T Function(core.Map<core.String, core.dynamic>) decode,
  ) => raw.containsKey(key)
      ? decode(raw[key] as core.Map<core.String, core.dynamic>)
      : null;

  /// Deserializes a [SpecificStartPosition] from its JSON representation.
  SpecificStartPosition.fromJson(core.Map raw)
    : this(
        mongodbChangeStreamPosition: _sub(
          raw,
          'mongodbChangeStreamPosition',
          MongodbChangeStreamPosition.fromJson,
        ),
        mysqlGtidPosition: _sub(
          raw,
          'mysqlGtidPosition',
          MysqlGtidPosition.fromJson,
        ),
        mysqlLogPosition: _sub(
          raw,
          'mysqlLogPosition',
          MysqlLogPosition.fromJson,
        ),
        oracleScnPosition: _sub(
          raw,
          'oracleScnPosition',
          OracleScnPosition.fromJson,
        ),
        sqlServerLsnPosition: _sub(
          raw,
          'sqlServerLsnPosition',
          SqlServerLsnPosition.fromJson,
        ),
      );

  /// Serializes this message to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    if (mongodbChangeStreamPosition != null)
      'mongodbChangeStreamPosition': mongodbChangeStreamPosition,
    if (mysqlGtidPosition != null) 'mysqlGtidPosition': mysqlGtidPosition,
    if (mysqlLogPosition != null) 'mysqlLogPosition': mysqlLogPosition,
    if (oracleScnPosition != null) 'oracleScnPosition': oracleScnPosition,
    if (sqlServerLsnPosition != null)
      'sqlServerLsnPosition': sqlServerLsnPosition,
  };
}
| |
/// Configuration to use Change Tables CDC read method.
///
/// This message carries no fields of its own; it is an alias for the shared
/// empty message type.
typedef SqlServerChangeTables = $Empty;
| |
/// SQLServer Column.
class SqlServerColumn {
  /// The column name.
  core.String? column;

  /// The SQLServer data type.
  core.String? dataType;

  /// Column length.
  core.int? length;

  /// Whether or not the column can accept a null value.
  core.bool? nullable;

  /// The ordinal position of the column in the table.
  core.int? ordinalPosition;

  /// Column precision.
  core.int? precision;

  /// Whether or not the column represents a primary key.
  core.bool? primaryKey;

  /// Column scale.
  core.int? scale;

  SqlServerColumn({
    this.column,
    this.dataType,
    this.length,
    this.nullable,
    this.ordinalPosition,
    this.precision,
    this.primaryKey,
    this.scale,
  });

  /// Deserializes a [SqlServerColumn] from its JSON wire representation.
  SqlServerColumn.fromJson(core.Map raw)
    : this(
        column: raw['column'] as core.String?,
        dataType: raw['dataType'] as core.String?,
        length: raw['length'] as core.int?,
        nullable: raw['nullable'] as core.bool?,
        ordinalPosition: raw['ordinalPosition'] as core.int?,
        precision: raw['precision'] as core.int?,
        primaryKey: raw['primaryKey'] as core.bool?,
        scale: raw['scale'] as core.int?,
      );

  /// Serializes this message to JSON, omitting unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    if (column != null) 'column': column,
    if (dataType != null) 'dataType': dataType,
    if (length != null) 'length': length,
    if (nullable != null) 'nullable': nullable,
    if (ordinalPosition != null) 'ordinalPosition': ordinalPosition,
    if (precision != null) 'precision': precision,
    if (primaryKey != null) 'primaryKey': primaryKey,
    if (scale != null) 'scale': scale,
  };
}
| |
/// SQL Server LSN position
class SqlServerLsnPosition {
  /// Log sequence number (LSN) from where Logs will be read
  ///
  /// Required.
  core.String? lsn;

  SqlServerLsnPosition({this.lsn});

  /// Deserializes a [SqlServerLsnPosition] from its JSON map representation.
  SqlServerLsnPosition.fromJson(core.Map json)
    : this(lsn: json['lsn'] as core.String?);

  /// Serializes this object to JSON, omitting the LSN when unset.
  core.Map<core.String, core.dynamic> toJson() => {'lsn': ?lsn};
}
| |
/// SQLServer data source object identifier.
///
/// Shares its shape with the generic shared [$ObjectIdentifier] message.
typedef SqlServerObjectIdentifier = $ObjectIdentifier;
| |
/// Profile for connecting to a SQLServer source.
class SqlServerProfile {
  /// Database for the SQLServer connection.
  ///
  /// Required.
  core.String? database;

  /// Hostname for the SQLServer connection.
  ///
  /// Required.
  core.String? hostname;

  /// Password for the SQLServer connection.
  ///
  /// Mutually exclusive with the `secret_manager_stored_password` field.
  ///
  /// Optional.
  core.String? password;

  /// Port for the SQLServer connection, default value is 1433.
  core.int? port;

  /// A reference to a Secret Manager resource name storing the SQLServer
  /// connection password.
  ///
  /// Mutually exclusive with the `password` field.
  ///
  /// Optional.
  core.String? secretManagerStoredPassword;

  /// SSL configuration for the SQLServer connection.
  ///
  /// Optional.
  SqlServerSslConfig? sslConfig;

  /// Username for the SQLServer connection.
  ///
  /// Required.
  core.String? username;

  SqlServerProfile({
    this.database,
    this.hostname,
    this.password,
    this.port,
    this.secretManagerStoredPassword,
    this.sslConfig,
    this.username,
  });

  /// Deserializes a [SqlServerProfile] from its JSON map representation.
  SqlServerProfile.fromJson(core.Map json)
    : this(
        database: json['database'] as core.String?,
        hostname: json['hostname'] as core.String?,
        password: json['password'] as core.String?,
        port: json['port'] as core.int?,
        secretManagerStoredPassword:
            json['secretManagerStoredPassword'] as core.String?,
        sslConfig: json.containsKey('sslConfig')
            ? SqlServerSslConfig.fromJson(
                json['sslConfig'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        username: json['username'] as core.String?,
      );

  /// Serializes this object to JSON, omitting any unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'database': ?database,
    'hostname': ?hostname,
    'password': ?password,
    'port': ?port,
    'secretManagerStoredPassword': ?secretManagerStoredPassword,
    'sslConfig': ?sslConfig,
    'username': ?username,
  };
}
| |
/// SQLServer database structure.
class SqlServerRdbms {
  /// SQLServer schemas in the database server.
  core.List<SqlServerSchema>? schemas;

  SqlServerRdbms({this.schemas});

  /// Deserializes a [SqlServerRdbms] from its JSON map representation.
  SqlServerRdbms.fromJson(core.Map json)
    : this(
        schemas: (json['schemas'] as core.List?)
            ?.map(
              (item) => SqlServerSchema.fromJson(
                item as core.Map<core.String, core.dynamic>,
              ),
            )
            .toList(),
      );

  /// Serializes this object to JSON, omitting `schemas` when unset.
  core.Map<core.String, core.dynamic> toJson() => {'schemas': ?schemas};
}
| |
/// SQLServer schema.
class SqlServerSchema {
  /// The schema name.
  core.String? schema;

  /// Tables in the schema.
  core.List<SqlServerTable>? tables;

  SqlServerSchema({this.schema, this.tables});

  /// Deserializes a [SqlServerSchema] from its JSON map representation.
  SqlServerSchema.fromJson(core.Map json)
    : this(
        schema: json['schema'] as core.String?,
        tables: (json['tables'] as core.List?)
            ?.map(
              (item) => SqlServerTable.fromJson(
                item as core.Map<core.String, core.dynamic>,
              ),
            )
            .toList(),
      );

  /// Serializes this object to JSON, omitting any unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'schema': ?schema,
    'tables': ?tables,
  };
}
| |
/// Configuration for syncing data from a SQLServer source.
class SqlServerSourceConfig {
  /// CDC reader reads from change tables.
  SqlServerChangeTables? changeTables;

  /// The SQLServer objects to exclude from the stream.
  SqlServerRdbms? excludeObjects;

  /// The SQLServer objects to include in the stream.
  SqlServerRdbms? includeObjects;

  /// Max concurrent backfill tasks.
  core.int? maxConcurrentBackfillTasks;

  /// Max concurrent CDC tasks.
  core.int? maxConcurrentCdcTasks;

  /// CDC reader reads from transaction logs.
  SqlServerTransactionLogs? transactionLogs;

  SqlServerSourceConfig({
    this.changeTables,
    this.excludeObjects,
    this.includeObjects,
    this.maxConcurrentBackfillTasks,
    this.maxConcurrentCdcTasks,
    this.transactionLogs,
  });

  /// Deserializes a [SqlServerSourceConfig] from its JSON map representation.
  SqlServerSourceConfig.fromJson(core.Map json)
    : this(
        changeTables: json.containsKey('changeTables')
            ? SqlServerChangeTables.fromJson(
                json['changeTables'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        excludeObjects: json.containsKey('excludeObjects')
            ? SqlServerRdbms.fromJson(
                json['excludeObjects'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        includeObjects: json.containsKey('includeObjects')
            ? SqlServerRdbms.fromJson(
                json['includeObjects'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        maxConcurrentBackfillTasks:
            json['maxConcurrentBackfillTasks'] as core.int?,
        maxConcurrentCdcTasks: json['maxConcurrentCdcTasks'] as core.int?,
        transactionLogs: json.containsKey('transactionLogs')
            ? SqlServerTransactionLogs.fromJson(
                json['transactionLogs'] as core.Map<core.String, core.dynamic>,
              )
            : null,
      );

  /// Serializes this object to JSON, omitting any unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'changeTables': ?changeTables,
    'excludeObjects': ?excludeObjects,
    'includeObjects': ?includeObjects,
    'maxConcurrentBackfillTasks': ?maxConcurrentBackfillTasks,
    'maxConcurrentCdcTasks': ?maxConcurrentCdcTasks,
    'transactionLogs': ?transactionLogs,
  };
}
| |
/// SQL Server SSL configuration information.
class SqlServerSslConfig {
  /// If set, Datastream will enforce encryption without authenticating server
  /// identity.
  ///
  /// Server certificates will be trusted by default.
  BasicEncryption? basicEncryption;

  /// If set, Datastream will enforce encryption and authenticate server
  /// identity.
  EncryptionAndServerValidation? encryptionAndServerValidation;

  /// If set, Datastream will not enforce encryption.
  ///
  /// If the DB server mandates encryption, then connection will be encrypted
  /// but server identity will not be authenticated.
  EncryptionNotEnforced? encryptionNotEnforced;

  SqlServerSslConfig({
    this.basicEncryption,
    this.encryptionAndServerValidation,
    this.encryptionNotEnforced,
  });

  /// Deserializes a [SqlServerSslConfig] from its JSON map representation.
  SqlServerSslConfig.fromJson(core.Map json)
    : this(
        basicEncryption: json.containsKey('basicEncryption')
            ? BasicEncryption.fromJson(
                json['basicEncryption'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        encryptionAndServerValidation:
            json.containsKey('encryptionAndServerValidation')
            ? EncryptionAndServerValidation.fromJson(
                json['encryptionAndServerValidation']
                    as core.Map<core.String, core.dynamic>,
              )
            : null,
        encryptionNotEnforced: json.containsKey('encryptionNotEnforced')
            ? EncryptionNotEnforced.fromJson(
                json['encryptionNotEnforced']
                    as core.Map<core.String, core.dynamic>,
              )
            : null,
      );

  /// Serializes this object to JSON, omitting any unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'basicEncryption': ?basicEncryption,
    'encryptionAndServerValidation': ?encryptionAndServerValidation,
    'encryptionNotEnforced': ?encryptionNotEnforced,
  };
}
| |
/// SQLServer table.
class SqlServerTable {
  /// SQLServer columns in the schema.
  ///
  /// When unspecified as part of include/exclude objects, includes/excludes
  /// everything.
  core.List<SqlServerColumn>? columns;

  /// The table name.
  core.String? table;

  SqlServerTable({this.columns, this.table});

  /// Deserializes a [SqlServerTable] from its JSON map representation.
  SqlServerTable.fromJson(core.Map json)
    : this(
        columns: (json['columns'] as core.List?)
            ?.map(
              (item) => SqlServerColumn.fromJson(
                item as core.Map<core.String, core.dynamic>,
              ),
            )
            .toList(),
        table: json['table'] as core.String?,
      );

  /// Serializes this object to JSON, omitting any unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'columns': ?columns,
    'table': ?table,
  };
}
| |
/// Configuration to use Transaction Logs CDC read method.
///
/// This message carries no fields of its own; its presence in a
/// [SqlServerSourceConfig] selects the transaction-logs read method.
typedef SqlServerTransactionLogs = $Empty;
| |
/// Srv connection format.
///
/// This message carries no fields; its presence selects the SRV connection
/// string format.
typedef SrvConnectionFormat = $Empty;
| |
/// Standard connection format.
class StandardConnectionFormat {
  /// Deprecated: Use the `additional_options` map to specify the
  /// `directConnection` parameter instead.
  ///
  /// For example: `additional_options = {"directConnection": "true"}`.
  /// Specifies whether the client connects directly to the host\[:port\] in the
  /// connection URI.
  ///
  /// Optional.
  @core.Deprecated(
    'Not supported. Member documentation may have more information.',
  )
  core.bool? directConnection;

  StandardConnectionFormat({this.directConnection});

  /// Deserializes a [StandardConnectionFormat] from its JSON representation.
  StandardConnectionFormat.fromJson(core.Map json)
    : this(directConnection: json['directConnection'] as core.bool?);

  /// Serializes this object to JSON, omitting `directConnection` when unset.
  core.Map<core.String, core.dynamic> toJson() => {
    'directConnection': ?directConnection,
  };
}
| |
/// Request for manually initiating a backfill job for a specific stream object.
class StartBackfillJobRequest {
  /// Optional event filter.
  ///
  /// If not set, or empty, the backfill will be performed on the entire object.
  /// This is currently used for partial backfill and only supported for SQL
  /// Server sources.
  ///
  /// Optional.
  EventFilter? eventFilter;

  StartBackfillJobRequest({this.eventFilter});

  /// Deserializes a [StartBackfillJobRequest] from its JSON representation.
  StartBackfillJobRequest.fromJson(core.Map json)
    : this(
        eventFilter: json.containsKey('eventFilter')
            ? EventFilter.fromJson(
                json['eventFilter'] as core.Map<core.String, core.dynamic>,
              )
            : null,
      );

  /// Serializes this object to JSON, omitting `eventFilter` when unset.
  core.Map<core.String, core.dynamic> toJson() => {'eventFilter': ?eventFilter};
}
| |
/// Response for manually initiating a backfill job for a specific stream
/// object.
class StartBackfillJobResponse {
  /// The stream object resource a backfill job was started for.
  StreamObject? object;

  StartBackfillJobResponse({this.object});

  /// Deserializes a [StartBackfillJobResponse] from its JSON representation.
  StartBackfillJobResponse.fromJson(core.Map json)
    : this(
        object: json.containsKey('object')
            ? StreamObject.fromJson(
                json['object'] as core.Map<core.String, core.dynamic>,
              )
            : null,
      );

  /// Serializes this object to JSON, omitting `object` when unset.
  core.Map<core.String, core.dynamic> toJson() => {'object': ?object};
}
| |
/// Static IP address connectivity.
///
/// Used when the source database is configured to allow incoming connections
/// from the Datastream public IP addresses for the region specified in the
/// connection profile.
///
/// This message carries no fields; its presence selects the connectivity
/// method.
typedef StaticServiceIpConnectivity = $Empty;
| |
/// The `Status` type defines a logical error model that is suitable for
/// different programming environments, including REST APIs and RPC APIs.
///
/// It is used by [gRPC](https://github.com/grpc). Each `Status` message
/// contains three pieces of data: error code, error message, and error details.
/// You can find out more about this error model and how to work with it in the
/// [API Design Guide](https://cloud.google.com/apis/design/errors).
///
/// Aliased to the shared `$Status00` message definition.
typedef Status = $Status00;
| |
/// Request for manually stopping a running backfill job for a specific stream
/// object.
///
/// This request message has no fields.
typedef StopBackfillJobRequest = $Empty;
| |
/// Response for manually stop a backfill job for a specific stream object.
class StopBackfillJobResponse {
  /// The stream object resource the backfill job was stopped for.
  StreamObject? object;

  StopBackfillJobResponse({this.object});

  /// Deserializes a [StopBackfillJobResponse] from its JSON representation.
  StopBackfillJobResponse.fromJson(core.Map json)
    : this(
        object: json.containsKey('object')
            ? StreamObject.fromJson(
                json['object'] as core.Map<core.String, core.dynamic>,
              )
            : null,
      );

  /// Serializes this object to JSON, omitting `object` when unset.
  core.Map<core.String, core.dynamic> toJson() => {'object': ?object};
}
| |
/// A resource representing streaming data from a source to a destination.
class Stream {
  /// Automatically backfill objects included in the stream source
  /// configuration.
  ///
  /// Specific objects can be excluded.
  BackfillAllStrategy? backfillAll;

  /// Do not automatically backfill any objects.
  BackfillNoneStrategy? backfillNone;

  /// The creation time of the stream.
  ///
  /// Output only.
  core.String? createTime;

  /// A reference to a KMS encryption key.
  ///
  /// If provided, it will be used to encrypt the data. If left blank, data will
  /// be encrypted using an internal Stream-specific encryption key provisioned
  /// through KMS.
  ///
  /// Immutable.
  core.String? customerManagedEncryptionKey;

  /// Destination connection profile configuration.
  ///
  /// Required.
  DestinationConfig? destinationConfig;

  /// Display name.
  ///
  /// Required.
  core.String? displayName;

  /// Errors on the Stream.
  ///
  /// Output only.
  core.List<Error>? errors;

  /// Labels.
  core.Map<core.String, core.String>? labels;

  /// If the stream was recovered, the time of the last recovery.
  ///
  /// Note: This field is currently experimental.
  ///
  /// Output only.
  core.String? lastRecoveryTime;

  /// Identifier.
  ///
  /// The stream's name.
  ///
  /// Output only.
  core.String? name;

  /// Rule sets to apply to the stream.
  ///
  /// Optional.
  core.List<RuleSet>? ruleSets;

  /// Reserved for future use.
  ///
  /// Output only.
  core.bool? satisfiesPzi;

  /// Reserved for future use.
  ///
  /// Output only.
  core.bool? satisfiesPzs;

  /// Source connection profile configuration.
  ///
  /// Required.
  SourceConfig? sourceConfig;

  /// The state of the stream.
  /// Possible string values are:
  /// - "STATE_UNSPECIFIED" : Unspecified stream state.
  /// - "NOT_STARTED" : The stream has been created but has not yet started
  /// streaming data.
  /// - "RUNNING" : The stream is running.
  /// - "PAUSED" : The stream is paused.
  /// - "MAINTENANCE" : The stream is in maintenance mode. Updates are rejected
  /// on the resource in this state.
  /// - "FAILED" : The stream is experiencing an error that is preventing data
  /// from being streamed.
  /// - "FAILED_PERMANENTLY" : The stream has experienced a terminal failure.
  /// - "STARTING" : The stream is starting, but not yet running.
  /// - "DRAINING" : The Stream is no longer reading new events, but still
  /// writing events in the buffer.
  core.String? state;

  /// The last update time of the stream.
  ///
  /// Output only.
  core.String? updateTime;

  Stream({
    this.backfillAll,
    this.backfillNone,
    this.createTime,
    this.customerManagedEncryptionKey,
    this.destinationConfig,
    this.displayName,
    this.errors,
    this.labels,
    this.lastRecoveryTime,
    this.name,
    this.ruleSets,
    this.satisfiesPzi,
    this.satisfiesPzs,
    this.sourceConfig,
    this.state,
    this.updateTime,
  });

  /// Deserializes a [Stream] from its JSON map representation.
  Stream.fromJson(core.Map json)
    : this(
        backfillAll: json.containsKey('backfillAll')
            ? BackfillAllStrategy.fromJson(
                json['backfillAll'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        backfillNone: json.containsKey('backfillNone')
            ? BackfillNoneStrategy.fromJson(
                json['backfillNone'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        createTime: json['createTime'] as core.String?,
        customerManagedEncryptionKey:
            json['customerManagedEncryptionKey'] as core.String?,
        destinationConfig: json.containsKey('destinationConfig')
            ? DestinationConfig.fromJson(
                json['destinationConfig']
                    as core.Map<core.String, core.dynamic>,
              )
            : null,
        displayName: json['displayName'] as core.String?,
        errors: (json['errors'] as core.List?)
            ?.map(
              (item) =>
                  Error.fromJson(item as core.Map<core.String, core.dynamic>),
            )
            .toList(),
        labels: (json['labels'] as core.Map<core.String, core.dynamic>?)?.map(
          (key, item) => core.MapEntry(key, item as core.String),
        ),
        lastRecoveryTime: json['lastRecoveryTime'] as core.String?,
        name: json['name'] as core.String?,
        ruleSets: (json['ruleSets'] as core.List?)
            ?.map(
              (item) =>
                  RuleSet.fromJson(item as core.Map<core.String, core.dynamic>),
            )
            .toList(),
        satisfiesPzi: json['satisfiesPzi'] as core.bool?,
        satisfiesPzs: json['satisfiesPzs'] as core.bool?,
        sourceConfig: json.containsKey('sourceConfig')
            ? SourceConfig.fromJson(
                json['sourceConfig'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        state: json['state'] as core.String?,
        updateTime: json['updateTime'] as core.String?,
      );

  /// Serializes this object to JSON, omitting any unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'backfillAll': ?backfillAll,
    'backfillNone': ?backfillNone,
    'createTime': ?createTime,
    'customerManagedEncryptionKey': ?customerManagedEncryptionKey,
    'destinationConfig': ?destinationConfig,
    'displayName': ?displayName,
    'errors': ?errors,
    'labels': ?labels,
    'lastRecoveryTime': ?lastRecoveryTime,
    'name': ?name,
    'ruleSets': ?ruleSets,
    'satisfiesPzi': ?satisfiesPzi,
    'satisfiesPzs': ?satisfiesPzs,
    'sourceConfig': ?sourceConfig,
    'state': ?state,
    'updateTime': ?updateTime,
  };
}
| |
/// Configuration to stream large object values.
///
/// This message carries no fields; its presence enables streaming of large
/// object values.
typedef StreamLargeObjects = $Empty;
| |
/// A specific stream object (e.g a specific DB table).
class StreamObject {
  /// The latest backfill job that was initiated for the stream object.
  BackfillJob? backfillJob;

  /// The creation time of the object.
  ///
  /// Output only.
  core.String? createTime;

  /// The customization rules for the object.
  ///
  /// These rules are derived from the parent Stream's `rule_sets` and represent
  /// the intended configuration for the object.
  ///
  /// Output only.
  core.List<CustomizationRule>? customizationRules;

  /// Display name.
  ///
  /// Required.
  core.String? displayName;

  /// Active errors on the object.
  ///
  /// Output only.
  core.List<Error>? errors;

  /// Identifier.
  ///
  /// The object resource's name.
  ///
  /// Output only.
  core.String? name;

  /// The object identifier in the data source.
  SourceObjectIdentifier? sourceObject;

  /// The last update time of the object.
  ///
  /// Output only.
  core.String? updateTime;

  StreamObject({
    this.backfillJob,
    this.createTime,
    this.customizationRules,
    this.displayName,
    this.errors,
    this.name,
    this.sourceObject,
    this.updateTime,
  });

  /// Deserializes a [StreamObject] from its JSON map representation.
  StreamObject.fromJson(core.Map json)
    : this(
        backfillJob: json.containsKey('backfillJob')
            ? BackfillJob.fromJson(
                json['backfillJob'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        createTime: json['createTime'] as core.String?,
        customizationRules: (json['customizationRules'] as core.List?)
            ?.map(
              (item) => CustomizationRule.fromJson(
                item as core.Map<core.String, core.dynamic>,
              ),
            )
            .toList(),
        displayName: json['displayName'] as core.String?,
        errors: (json['errors'] as core.List?)
            ?.map(
              (item) =>
                  Error.fromJson(item as core.Map<core.String, core.dynamic>),
            )
            .toList(),
        name: json['name'] as core.String?,
        sourceObject: json.containsKey('sourceObject')
            ? SourceObjectIdentifier.fromJson(
                json['sourceObject'] as core.Map<core.String, core.dynamic>,
              )
            : null,
        updateTime: json['updateTime'] as core.String?,
      );

  /// Serializes this object to JSON, omitting any unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'backfillJob': ?backfillJob,
    'createTime': ?createTime,
    'customizationRules': ?customizationRules,
    'displayName': ?displayName,
    'errors': ?errors,
    'name': ?name,
    'sourceObject': ?sourceObject,
    'updateTime': ?updateTime,
  };
}
| |
/// Time unit column partitioning.
///
/// see
/// https://cloud.google.com/bigquery/docs/partitioned-tables#date_timestamp_partitioned_tables
class TimeUnitPartition {
  /// The partitioning column.
  ///
  /// Required.
  core.String? column;

  /// Partition granularity.
  ///
  /// Optional.
  /// Possible string values are:
  /// - "PARTITIONING_TIME_GRANULARITY_UNSPECIFIED" : Unspecified partitioning
  /// interval.
  /// - "PARTITIONING_TIME_GRANULARITY_HOUR" : Hourly partitioning.
  /// - "PARTITIONING_TIME_GRANULARITY_DAY" : Daily partitioning.
  /// - "PARTITIONING_TIME_GRANULARITY_MONTH" : Monthly partitioning.
  /// - "PARTITIONING_TIME_GRANULARITY_YEAR" : Yearly partitioning.
  core.String? partitioningTimeGranularity;

  TimeUnitPartition({this.column, this.partitioningTimeGranularity});

  /// Deserializes a [TimeUnitPartition] from its JSON map representation.
  TimeUnitPartition.fromJson(core.Map json)
    : this(
        column: json['column'] as core.String?,
        partitioningTimeGranularity:
            json['partitioningTimeGranularity'] as core.String?,
      );

  /// Serializes this object to JSON, omitting any unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'column': ?column,
    'partitioningTimeGranularity': ?partitioningTimeGranularity,
  };
}
| |
/// Username-password credentials.
class UserCredentials {
  /// Password for the Salesforce connection.
  ///
  /// Mutually exclusive with the `secret_manager_stored_password` field.
  ///
  /// Optional.
  core.String? password;

  /// A reference to a Secret Manager resource name storing the Salesforce
  /// connection's password.
  ///
  /// Mutually exclusive with the `password` field.
  ///
  /// Optional.
  core.String? secretManagerStoredPassword;

  /// A reference to a Secret Manager resource name storing the Salesforce
  /// connection's security token.
  ///
  /// Mutually exclusive with the `security_token` field.
  ///
  /// Optional.
  core.String? secretManagerStoredSecurityToken;

  /// Security token for the Salesforce connection.
  ///
  /// Mutually exclusive with the `secret_manager_stored_security_token` field.
  ///
  /// Optional.
  core.String? securityToken;

  /// Username for the Salesforce connection.
  ///
  /// Required.
  core.String? username;

  UserCredentials({
    this.password,
    this.secretManagerStoredPassword,
    this.secretManagerStoredSecurityToken,
    this.securityToken,
    this.username,
  });

  /// Deserializes a [UserCredentials] from its JSON map representation.
  UserCredentials.fromJson(core.Map json)
    : this(
        password: json['password'] as core.String?,
        secretManagerStoredPassword:
            json['secretManagerStoredPassword'] as core.String?,
        secretManagerStoredSecurityToken:
            json['secretManagerStoredSecurityToken'] as core.String?,
        securityToken: json['securityToken'] as core.String?,
        username: json['username'] as core.String?,
      );

  /// Serializes this object to JSON, omitting any unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'password': ?password,
    'secretManagerStoredPassword': ?secretManagerStoredPassword,
    'secretManagerStoredSecurityToken': ?secretManagerStoredSecurityToken,
    'securityToken': ?securityToken,
    'username': ?username,
  };
}
| |
/// The VPC Peering configuration is used to create VPC peering between
/// Datastream and the consumer's VPC.
class VpcPeeringConfig {
  /// A free subnet for peering.
  ///
  /// (CIDR of /29)
  ///
  /// Required.
  core.String? subnet;

  /// Fully qualified name of the VPC that Datastream will peer to.
  ///
  /// Format: `projects/{project}/global/{networks}/{name}`
  ///
  /// Required.
  core.String? vpc;

  VpcPeeringConfig({this.subnet, this.vpc});

  /// Deserializes a [VpcPeeringConfig] from its JSON map representation.
  VpcPeeringConfig.fromJson(core.Map json)
    : this(
        subnet: json['subnet'] as core.String?,
        vpc: json['vpc'] as core.String?,
      );

  /// Serializes this object to JSON, omitting any unset (null) fields.
  core.Map<core.String, core.dynamic> toJson() => {
    'subnet': ?subnet,
    'vpc': ?vpc,
  };
}