diff --git a/ui/.eslintrc.json b/ui/.eslintrc.json index 8b326a0cc5..1aa3f3c57b 100644 --- a/ui/.eslintrc.json +++ b/ui/.eslintrc.json @@ -1,7 +1,3 @@ { - "extends": [ - "next", - "plugin:storybook/recommended", - "prettier" - ] + "extends": ["next", "plugin:storybook/recommended", "prettier"] } diff --git a/ui/app/connectors/page.tsx b/ui/app/connectors/page.tsx index b8570fc1c7..3297001d65 100644 --- a/ui/app/connectors/page.tsx +++ b/ui/app/connectors/page.tsx @@ -10,7 +10,6 @@ import { SearchField } from '@/lib/SearchField'; import { Select } from '@/lib/Select'; import { Table, TableCell, TableRow } from '@/lib/Table'; import { GetFlowServiceClient } from '@/rpc/rpc'; -import getConfig from 'next/config'; import Link from 'next/link'; import { Suspense } from 'react'; import { Header } from '../../lib/Header'; diff --git a/ui/grpc_generated/flow.ts b/ui/grpc_generated/flow.ts index c129730d5b..f2b1f108cb 100644 --- a/ui/grpc_generated/flow.ts +++ b/ui/grpc_generated/flow.ts @@ -1,10 +1,10 @@ /* eslint-disable */ -import Long from "long"; -import _m0 from "protobufjs/minimal"; -import { Timestamp } from "./google/protobuf/timestamp"; -import { Peer } from "./peers"; +import Long from 'long'; +import _m0 from 'protobufjs/minimal'; +import { Timestamp } from './google/protobuf/timestamp'; +import { Peer } from './peers'; -export const protobufPackage = "peerdb_flow"; +export const protobufPackage = 'peerdb_flow'; /** protos for qrep */ export enum QRepSyncMode { @@ -16,13 +16,13 @@ export enum QRepSyncMode { export function qRepSyncModeFromJSON(object: any): QRepSyncMode { switch (object) { case 0: - case "QREP_SYNC_MODE_MULTI_INSERT": + case 'QREP_SYNC_MODE_MULTI_INSERT': return QRepSyncMode.QREP_SYNC_MODE_MULTI_INSERT; case 1: - case "QREP_SYNC_MODE_STORAGE_AVRO": + case 'QREP_SYNC_MODE_STORAGE_AVRO': return QRepSyncMode.QREP_SYNC_MODE_STORAGE_AVRO; case -1: - case "UNRECOGNIZED": + case 'UNRECOGNIZED': default: return QRepSyncMode.UNRECOGNIZED; } @@ -31,12 +31,12 @@ export function qRepSyncModeFromJSON(object: any): QRepSyncMode { export function qRepSyncModeToJSON(object: QRepSyncMode): string { switch (object) { case QRepSyncMode.QREP_SYNC_MODE_MULTI_INSERT: - return "QREP_SYNC_MODE_MULTI_INSERT"; + return 'QREP_SYNC_MODE_MULTI_INSERT'; case QRepSyncMode.QREP_SYNC_MODE_STORAGE_AVRO: - return "QREP_SYNC_MODE_STORAGE_AVRO"; + return 'QREP_SYNC_MODE_STORAGE_AVRO'; case QRepSyncMode.UNRECOGNIZED: default: - return "UNRECOGNIZED"; + return 'UNRECOGNIZED'; } } @@ -49,13 +49,13 @@ export enum QRepWriteType { export function qRepWriteTypeFromJSON(object: any): QRepWriteType { switch (object) { case 0: - case "QREP_WRITE_MODE_APPEND": + case 'QREP_WRITE_MODE_APPEND': return QRepWriteType.QREP_WRITE_MODE_APPEND; case 1: - case "QREP_WRITE_MODE_UPSERT": + case 'QREP_WRITE_MODE_UPSERT': return QRepWriteType.QREP_WRITE_MODE_UPSERT; case -1: - case "UNRECOGNIZED": + case 'UNRECOGNIZED': default: return QRepWriteType.UNRECOGNIZED; } @@ -64,12 +64,12 @@ export function qRepWriteTypeFromJSON(object: any): QRepWriteType { export function qRepWriteTypeToJSON(object: QRepWriteType): string { switch (object) { case QRepWriteType.QREP_WRITE_MODE_APPEND: - return "QREP_WRITE_MODE_APPEND"; + return 'QREP_WRITE_MODE_APPEND'; case QRepWriteType.QREP_WRITE_MODE_UPSERT: - return "QREP_WRITE_MODE_UPSERT"; + return 'QREP_WRITE_MODE_UPSERT'; case QRepWriteType.UNRECOGNIZED: default: - return "UNRECOGNIZED"; + return 'UNRECOGNIZED'; } } @@ -326,9 +326,7 @@ export interface QRepConfig { maxParallelWorkers: number; 
/** time to wait between getting partitions to process */ waitBetweenBatchesSeconds: number; - writeMode: - | QRepWriteMode - | undefined; + writeMode: QRepWriteMode | undefined; /** * This is only used when sync_mode is AVRO * this is the location where the avro files will be written @@ -366,22 +364,26 @@ export interface DropFlowInput { } function createBaseTableNameMapping(): TableNameMapping { - return { sourceTableName: "", destinationTableName: "" }; + return { sourceTableName: '', destinationTableName: '' }; } export const TableNameMapping = { - encode(message: TableNameMapping, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.sourceTableName !== "") { + encode( + message: TableNameMapping, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { + if (message.sourceTableName !== '') { writer.uint32(10).string(message.sourceTableName); } - if (message.destinationTableName !== "") { + if (message.destinationTableName !== '') { writer.uint32(18).string(message.destinationTableName); } return writer; }, decode(input: _m0.Reader | Uint8Array, length?: number): TableNameMapping { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseTableNameMapping(); while (reader.pos < end) { @@ -412,29 +414,37 @@ export const TableNameMapping = { fromJSON(object: any): TableNameMapping { return { - sourceTableName: isSet(object.sourceTableName) ? String(object.sourceTableName) : "", - destinationTableName: isSet(object.destinationTableName) ? String(object.destinationTableName) : "", + sourceTableName: isSet(object.sourceTableName) + ? String(object.sourceTableName) + : '', + destinationTableName: isSet(object.destinationTableName) + ? String(object.destinationTableName) + : '', }; }, toJSON(message: TableNameMapping): unknown { const obj: any = {}; - if (message.sourceTableName !== "") { + if (message.sourceTableName !== '') { obj.sourceTableName = message.sourceTableName; } - if (message.destinationTableName !== "") { + if (message.destinationTableName !== '') { obj.destinationTableName = message.destinationTableName; } return obj; }, - create, I>>(base?: I): TableNameMapping { + create, I>>( + base?: I + ): TableNameMapping { return TableNameMapping.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): TableNameMapping { + fromPartial, I>>( + object: I + ): TableNameMapping { const message = createBaseTableNameMapping(); - message.sourceTableName = object.sourceTableName ?? ""; - message.destinationTableName = object.destinationTableName ?? ""; + message.sourceTableName = object.sourceTableName ?? ''; + message.destinationTableName = object.destinationTableName ?? 
''; return message; }, }; @@ -443,7 +453,7 @@ function createBaseFlowConnectionConfigs(): FlowConnectionConfigs { return { source: undefined, destination: undefined, - flowJobName: "", + flowJobName: '', tableSchema: undefined, tableNameMapping: {}, srcTableIdNameMapping: {}, @@ -451,43 +461,56 @@ function createBaseFlowConnectionConfigs(): FlowConnectionConfigs { metadataPeer: undefined, maxBatchSize: 0, doInitialCopy: false, - publicationName: "", + publicationName: '', snapshotNumRowsPerPartition: 0, snapshotMaxParallelWorkers: 0, snapshotNumTablesInParallel: 0, snapshotSyncMode: 0, cdcSyncMode: 0, - snapshotStagingPath: "", - cdcStagingPath: "", + snapshotStagingPath: '', + cdcStagingPath: '', softDelete: false, - replicationSlotName: "", + replicationSlotName: '', }; } export const FlowConnectionConfigs = { - encode(message: FlowConnectionConfigs, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: FlowConnectionConfigs, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.source !== undefined) { Peer.encode(message.source, writer.uint32(10).fork()).ldelim(); } if (message.destination !== undefined) { Peer.encode(message.destination, writer.uint32(18).fork()).ldelim(); } - if (message.flowJobName !== "") { + if (message.flowJobName !== '') { writer.uint32(26).string(message.flowJobName); } if (message.tableSchema !== undefined) { - TableSchema.encode(message.tableSchema, writer.uint32(34).fork()).ldelim(); + TableSchema.encode( + message.tableSchema, + writer.uint32(34).fork() + ).ldelim(); } Object.entries(message.tableNameMapping).forEach(([key, value]) => { - FlowConnectionConfigs_TableNameMappingEntry.encode({ key: key as any, value }, writer.uint32(42).fork()).ldelim(); + FlowConnectionConfigs_TableNameMappingEntry.encode( + { key: key as any, value }, + writer.uint32(42).fork() + ).ldelim(); }); Object.entries(message.srcTableIdNameMapping).forEach(([key, value]) => { - FlowConnectionConfigs_SrcTableIdNameMappingEntry.encode({ key: key as any, value }, writer.uint32(50).fork()) - .ldelim(); + FlowConnectionConfigs_SrcTableIdNameMappingEntry.encode( + { key: key as any, value }, + writer.uint32(50).fork() + ).ldelim(); }); Object.entries(message.tableNameSchemaMapping).forEach(([key, value]) => { - FlowConnectionConfigs_TableNameSchemaMappingEntry.encode({ key: key as any, value }, writer.uint32(58).fork()) - .ldelim(); + FlowConnectionConfigs_TableNameSchemaMappingEntry.encode( + { key: key as any, value }, + writer.uint32(58).fork() + ).ldelim(); }); if (message.metadataPeer !== undefined) { Peer.encode(message.metadataPeer, writer.uint32(66).fork()).ldelim(); @@ -498,7 +521,7 @@ export const FlowConnectionConfigs = { if (message.doInitialCopy === true) { writer.uint32(80).bool(message.doInitialCopy); } - if (message.publicationName !== "") { + if (message.publicationName !== '') { writer.uint32(90).string(message.publicationName); } if (message.snapshotNumRowsPerPartition !== 0) { @@ -516,23 +539,27 @@ export const FlowConnectionConfigs = { if (message.cdcSyncMode !== 0) { writer.uint32(128).int32(message.cdcSyncMode); } - if (message.snapshotStagingPath !== "") { + if (message.snapshotStagingPath !== '') { writer.uint32(138).string(message.snapshotStagingPath); } - if (message.cdcStagingPath !== "") { + if (message.cdcStagingPath !== '') { writer.uint32(146).string(message.cdcStagingPath); } if (message.softDelete === true) { writer.uint32(152).bool(message.softDelete); } - if (message.replicationSlotName !== "") { + if 
(message.replicationSlotName !== '') { writer.uint32(162).string(message.replicationSlotName); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): FlowConnectionConfigs { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): FlowConnectionConfigs { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseFlowConnectionConfigs(); while (reader.pos < end) { @@ -571,7 +598,10 @@ export const FlowConnectionConfigs = { break; } - const entry5 = FlowConnectionConfigs_TableNameMappingEntry.decode(reader, reader.uint32()); + const entry5 = FlowConnectionConfigs_TableNameMappingEntry.decode( + reader, + reader.uint32() + ); if (entry5.value !== undefined) { message.tableNameMapping[entry5.key] = entry5.value; } @@ -581,7 +611,11 @@ export const FlowConnectionConfigs = { break; } - const entry6 = FlowConnectionConfigs_SrcTableIdNameMappingEntry.decode(reader, reader.uint32()); + const entry6 = + FlowConnectionConfigs_SrcTableIdNameMappingEntry.decode( + reader, + reader.uint32() + ); if (entry6.value !== undefined) { message.srcTableIdNameMapping[entry6.key] = entry6.value; } @@ -591,7 +625,11 @@ export const FlowConnectionConfigs = { break; } - const entry7 = FlowConnectionConfigs_TableNameSchemaMappingEntry.decode(reader, reader.uint32()); + const entry7 = + FlowConnectionConfigs_TableNameSchemaMappingEntry.decode( + reader, + reader.uint32() + ); if (entry7.value !== undefined) { message.tableNameSchemaMapping[entry7.key] = entry7.value; } @@ -699,31 +737,49 @@ export const FlowConnectionConfigs = { fromJSON(object: any): FlowConnectionConfigs { return { source: isSet(object.source) ? Peer.fromJSON(object.source) : undefined, - destination: isSet(object.destination) ? Peer.fromJSON(object.destination) : undefined, - flowJobName: isSet(object.flowJobName) ? String(object.flowJobName) : "", - tableSchema: isSet(object.tableSchema) ? TableSchema.fromJSON(object.tableSchema) : undefined, + destination: isSet(object.destination) + ? Peer.fromJSON(object.destination) + : undefined, + flowJobName: isSet(object.flowJobName) ? String(object.flowJobName) : '', + tableSchema: isSet(object.tableSchema) + ? TableSchema.fromJSON(object.tableSchema) + : undefined, tableNameMapping: isObject(object.tableNameMapping) - ? Object.entries(object.tableNameMapping).reduce<{ [key: string]: string }>((acc, [key, value]) => { - acc[key] = String(value); - return acc; - }, {}) + ? Object.entries(object.tableNameMapping).reduce<{ + [key: string]: string; + }>((acc, [key, value]) => { + acc[key] = String(value); + return acc; + }, {}) : {}, srcTableIdNameMapping: isObject(object.srcTableIdNameMapping) - ? Object.entries(object.srcTableIdNameMapping).reduce<{ [key: number]: string }>((acc, [key, value]) => { - acc[Number(key)] = String(value); - return acc; - }, {}) + ? Object.entries(object.srcTableIdNameMapping).reduce<{ + [key: number]: string; + }>((acc, [key, value]) => { + acc[Number(key)] = String(value); + return acc; + }, {}) : {}, tableNameSchemaMapping: isObject(object.tableNameSchemaMapping) - ? Object.entries(object.tableNameSchemaMapping).reduce<{ [key: string]: TableSchema }>((acc, [key, value]) => { - acc[key] = TableSchema.fromJSON(value); - return acc; - }, {}) + ? 
Object.entries(object.tableNameSchemaMapping).reduce<{ + [key: string]: TableSchema; + }>((acc, [key, value]) => { + acc[key] = TableSchema.fromJSON(value); + return acc; + }, {}) : {}, - metadataPeer: isSet(object.metadataPeer) ? Peer.fromJSON(object.metadataPeer) : undefined, - maxBatchSize: isSet(object.maxBatchSize) ? Number(object.maxBatchSize) : 0, - doInitialCopy: isSet(object.doInitialCopy) ? Boolean(object.doInitialCopy) : false, - publicationName: isSet(object.publicationName) ? String(object.publicationName) : "", + metadataPeer: isSet(object.metadataPeer) + ? Peer.fromJSON(object.metadataPeer) + : undefined, + maxBatchSize: isSet(object.maxBatchSize) + ? Number(object.maxBatchSize) + : 0, + doInitialCopy: isSet(object.doInitialCopy) + ? Boolean(object.doInitialCopy) + : false, + publicationName: isSet(object.publicationName) + ? String(object.publicationName) + : '', snapshotNumRowsPerPartition: isSet(object.snapshotNumRowsPerPartition) ? Number(object.snapshotNumRowsPerPartition) : 0, @@ -733,12 +789,22 @@ export const FlowConnectionConfigs = { snapshotNumTablesInParallel: isSet(object.snapshotNumTablesInParallel) ? Number(object.snapshotNumTablesInParallel) : 0, - snapshotSyncMode: isSet(object.snapshotSyncMode) ? qRepSyncModeFromJSON(object.snapshotSyncMode) : 0, - cdcSyncMode: isSet(object.cdcSyncMode) ? qRepSyncModeFromJSON(object.cdcSyncMode) : 0, - snapshotStagingPath: isSet(object.snapshotStagingPath) ? String(object.snapshotStagingPath) : "", - cdcStagingPath: isSet(object.cdcStagingPath) ? String(object.cdcStagingPath) : "", + snapshotSyncMode: isSet(object.snapshotSyncMode) + ? qRepSyncModeFromJSON(object.snapshotSyncMode) + : 0, + cdcSyncMode: isSet(object.cdcSyncMode) + ? qRepSyncModeFromJSON(object.cdcSyncMode) + : 0, + snapshotStagingPath: isSet(object.snapshotStagingPath) + ? String(object.snapshotStagingPath) + : '', + cdcStagingPath: isSet(object.cdcStagingPath) + ? String(object.cdcStagingPath) + : '', softDelete: isSet(object.softDelete) ? Boolean(object.softDelete) : false, - replicationSlotName: isSet(object.replicationSlotName) ? String(object.replicationSlotName) : "", + replicationSlotName: isSet(object.replicationSlotName) + ? 
String(object.replicationSlotName) + : '', }; }, @@ -750,7 +816,7 @@ export const FlowConnectionConfigs = { if (message.destination !== undefined) { obj.destination = Peer.toJSON(message.destination); } - if (message.flowJobName !== "") { + if (message.flowJobName !== '') { obj.flowJobName = message.flowJobName; } if (message.tableSchema !== undefined) { @@ -792,17 +858,23 @@ export const FlowConnectionConfigs = { if (message.doInitialCopy === true) { obj.doInitialCopy = message.doInitialCopy; } - if (message.publicationName !== "") { + if (message.publicationName !== '') { obj.publicationName = message.publicationName; } if (message.snapshotNumRowsPerPartition !== 0) { - obj.snapshotNumRowsPerPartition = Math.round(message.snapshotNumRowsPerPartition); + obj.snapshotNumRowsPerPartition = Math.round( + message.snapshotNumRowsPerPartition + ); } if (message.snapshotMaxParallelWorkers !== 0) { - obj.snapshotMaxParallelWorkers = Math.round(message.snapshotMaxParallelWorkers); + obj.snapshotMaxParallelWorkers = Math.round( + message.snapshotMaxParallelWorkers + ); } if (message.snapshotNumTablesInParallel !== 0) { - obj.snapshotNumTablesInParallel = Math.round(message.snapshotNumTablesInParallel); + obj.snapshotNumTablesInParallel = Math.round( + message.snapshotNumTablesInParallel + ); } if (message.snapshotSyncMode !== 0) { obj.snapshotSyncMode = qRepSyncModeToJSON(message.snapshotSyncMode); @@ -810,97 +882,113 @@ export const FlowConnectionConfigs = { if (message.cdcSyncMode !== 0) { obj.cdcSyncMode = qRepSyncModeToJSON(message.cdcSyncMode); } - if (message.snapshotStagingPath !== "") { + if (message.snapshotStagingPath !== '') { obj.snapshotStagingPath = message.snapshotStagingPath; } - if (message.cdcStagingPath !== "") { + if (message.cdcStagingPath !== '') { obj.cdcStagingPath = message.cdcStagingPath; } if (message.softDelete === true) { obj.softDelete = message.softDelete; } - if (message.replicationSlotName !== "") { + if (message.replicationSlotName !== '') { obj.replicationSlotName = message.replicationSlotName; } return obj; }, - create, I>>(base?: I): FlowConnectionConfigs { + create, I>>( + base?: I + ): FlowConnectionConfigs { return FlowConnectionConfigs.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): FlowConnectionConfigs { + fromPartial, I>>( + object: I + ): FlowConnectionConfigs { const message = createBaseFlowConnectionConfigs(); - message.source = (object.source !== undefined && object.source !== null) - ? Peer.fromPartial(object.source) - : undefined; - message.destination = (object.destination !== undefined && object.destination !== null) - ? Peer.fromPartial(object.destination) - : undefined; - message.flowJobName = object.flowJobName ?? ""; - message.tableSchema = (object.tableSchema !== undefined && object.tableSchema !== null) - ? TableSchema.fromPartial(object.tableSchema) - : undefined; - message.tableNameMapping = Object.entries(object.tableNameMapping ?? {}).reduce<{ [key: string]: string }>( - (acc, [key, value]) => { - if (value !== undefined) { - acc[key] = String(value); - } - return acc; - }, - {}, - ); - message.srcTableIdNameMapping = Object.entries(object.srcTableIdNameMapping ?? {}).reduce< - { [key: number]: string } - >((acc, [key, value]) => { + message.source = + object.source !== undefined && object.source !== null + ? Peer.fromPartial(object.source) + : undefined; + message.destination = + object.destination !== undefined && object.destination !== null + ? 
Peer.fromPartial(object.destination) + : undefined; + message.flowJobName = object.flowJobName ?? ''; + message.tableSchema = + object.tableSchema !== undefined && object.tableSchema !== null + ? TableSchema.fromPartial(object.tableSchema) + : undefined; + message.tableNameMapping = Object.entries( + object.tableNameMapping ?? {} + ).reduce<{ [key: string]: string }>((acc, [key, value]) => { + if (value !== undefined) { + acc[key] = String(value); + } + return acc; + }, {}); + message.srcTableIdNameMapping = Object.entries( + object.srcTableIdNameMapping ?? {} + ).reduce<{ [key: number]: string }>((acc, [key, value]) => { if (value !== undefined) { acc[Number(key)] = String(value); } return acc; }, {}); - message.tableNameSchemaMapping = Object.entries(object.tableNameSchemaMapping ?? {}).reduce< - { [key: string]: TableSchema } - >((acc, [key, value]) => { + message.tableNameSchemaMapping = Object.entries( + object.tableNameSchemaMapping ?? {} + ).reduce<{ [key: string]: TableSchema }>((acc, [key, value]) => { if (value !== undefined) { acc[key] = TableSchema.fromPartial(value); } return acc; }, {}); - message.metadataPeer = (object.metadataPeer !== undefined && object.metadataPeer !== null) - ? Peer.fromPartial(object.metadataPeer) - : undefined; + message.metadataPeer = + object.metadataPeer !== undefined && object.metadataPeer !== null + ? Peer.fromPartial(object.metadataPeer) + : undefined; message.maxBatchSize = object.maxBatchSize ?? 0; message.doInitialCopy = object.doInitialCopy ?? false; - message.publicationName = object.publicationName ?? ""; - message.snapshotNumRowsPerPartition = object.snapshotNumRowsPerPartition ?? 0; + message.publicationName = object.publicationName ?? ''; + message.snapshotNumRowsPerPartition = + object.snapshotNumRowsPerPartition ?? 0; message.snapshotMaxParallelWorkers = object.snapshotMaxParallelWorkers ?? 0; - message.snapshotNumTablesInParallel = object.snapshotNumTablesInParallel ?? 0; + message.snapshotNumTablesInParallel = + object.snapshotNumTablesInParallel ?? 0; message.snapshotSyncMode = object.snapshotSyncMode ?? 0; message.cdcSyncMode = object.cdcSyncMode ?? 0; - message.snapshotStagingPath = object.snapshotStagingPath ?? ""; - message.cdcStagingPath = object.cdcStagingPath ?? ""; + message.snapshotStagingPath = object.snapshotStagingPath ?? ''; + message.cdcStagingPath = object.cdcStagingPath ?? ''; message.softDelete = object.softDelete ?? false; - message.replicationSlotName = object.replicationSlotName ?? ""; + message.replicationSlotName = object.replicationSlotName ?? ''; return message; }, }; function createBaseFlowConnectionConfigs_TableNameMappingEntry(): FlowConnectionConfigs_TableNameMappingEntry { - return { key: "", value: "" }; + return { key: '', value: '' }; } export const FlowConnectionConfigs_TableNameMappingEntry = { - encode(message: FlowConnectionConfigs_TableNameMappingEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.key !== "") { + encode( + message: FlowConnectionConfigs_TableNameMappingEntry, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { + if (message.key !== '') { writer.uint32(10).string(message.key); } - if (message.value !== "") { + if (message.value !== '') { writer.uint32(18).string(message.value); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): FlowConnectionConfigs_TableNameMappingEntry { - const reader = input instanceof _m0.Reader ? 
input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): FlowConnectionConfigs_TableNameMappingEntry { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseFlowConnectionConfigs_TableNameMappingEntry(); while (reader.pos < end) { @@ -930,57 +1018,73 @@ export const FlowConnectionConfigs_TableNameMappingEntry = { }, fromJSON(object: any): FlowConnectionConfigs_TableNameMappingEntry { - return { key: isSet(object.key) ? String(object.key) : "", value: isSet(object.value) ? String(object.value) : "" }; + return { + key: isSet(object.key) ? String(object.key) : '', + value: isSet(object.value) ? String(object.value) : '', + }; }, toJSON(message: FlowConnectionConfigs_TableNameMappingEntry): unknown { const obj: any = {}; - if (message.key !== "") { + if (message.key !== '') { obj.key = message.key; } - if (message.value !== "") { + if (message.value !== '') { obj.value = message.value; } return obj; }, - create, I>>( - base?: I, - ): FlowConnectionConfigs_TableNameMappingEntry { - return FlowConnectionConfigs_TableNameMappingEntry.fromPartial(base ?? ({} as any)); + create< + I extends Exact< + DeepPartial, + I + >, + >(base?: I): FlowConnectionConfigs_TableNameMappingEntry { + return FlowConnectionConfigs_TableNameMappingEntry.fromPartial( + base ?? ({} as any) + ); }, - fromPartial, I>>( - object: I, - ): FlowConnectionConfigs_TableNameMappingEntry { + fromPartial< + I extends Exact< + DeepPartial, + I + >, + >(object: I): FlowConnectionConfigs_TableNameMappingEntry { const message = createBaseFlowConnectionConfigs_TableNameMappingEntry(); - message.key = object.key ?? ""; - message.value = object.value ?? ""; + message.key = object.key ?? ''; + message.value = object.value ?? ''; return message; }, }; function createBaseFlowConnectionConfigs_SrcTableIdNameMappingEntry(): FlowConnectionConfigs_SrcTableIdNameMappingEntry { - return { key: 0, value: "" }; + return { key: 0, value: '' }; } export const FlowConnectionConfigs_SrcTableIdNameMappingEntry = { encode( message: FlowConnectionConfigs_SrcTableIdNameMappingEntry, - writer: _m0.Writer = _m0.Writer.create(), + writer: _m0.Writer = _m0.Writer.create() ): _m0.Writer { if (message.key !== 0) { writer.uint32(8).uint32(message.key); } - if (message.value !== "") { + if (message.value !== '') { writer.uint32(18).string(message.value); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): FlowConnectionConfigs_SrcTableIdNameMappingEntry { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): FlowConnectionConfigs_SrcTableIdNameMappingEntry { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseFlowConnectionConfigs_SrcTableIdNameMappingEntry(); + const message = + createBaseFlowConnectionConfigs_SrcTableIdNameMappingEntry(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { @@ -1008,7 +1112,10 @@ export const FlowConnectionConfigs_SrcTableIdNameMappingEntry = { }, fromJSON(object: any): FlowConnectionConfigs_SrcTableIdNameMappingEntry { - return { key: isSet(object.key) ? Number(object.key) : 0, value: isSet(object.value) ? String(object.value) : "" }; + return { + key: isSet(object.key) ? 
Number(object.key) : 0, + value: isSet(object.value) ? String(object.value) : '', + }; }, toJSON(message: FlowConnectionConfigs_SrcTableIdNameMappingEntry): unknown { @@ -1016,37 +1123,46 @@ export const FlowConnectionConfigs_SrcTableIdNameMappingEntry = { if (message.key !== 0) { obj.key = Math.round(message.key); } - if (message.value !== "") { + if (message.value !== '') { obj.value = message.value; } return obj; }, - create, I>>( - base?: I, - ): FlowConnectionConfigs_SrcTableIdNameMappingEntry { - return FlowConnectionConfigs_SrcTableIdNameMappingEntry.fromPartial(base ?? ({} as any)); + create< + I extends Exact< + DeepPartial, + I + >, + >(base?: I): FlowConnectionConfigs_SrcTableIdNameMappingEntry { + return FlowConnectionConfigs_SrcTableIdNameMappingEntry.fromPartial( + base ?? ({} as any) + ); }, - fromPartial, I>>( - object: I, - ): FlowConnectionConfigs_SrcTableIdNameMappingEntry { - const message = createBaseFlowConnectionConfigs_SrcTableIdNameMappingEntry(); + fromPartial< + I extends Exact< + DeepPartial, + I + >, + >(object: I): FlowConnectionConfigs_SrcTableIdNameMappingEntry { + const message = + createBaseFlowConnectionConfigs_SrcTableIdNameMappingEntry(); message.key = object.key ?? 0; - message.value = object.value ?? ""; + message.value = object.value ?? ''; return message; }, }; function createBaseFlowConnectionConfigs_TableNameSchemaMappingEntry(): FlowConnectionConfigs_TableNameSchemaMappingEntry { - return { key: "", value: undefined }; + return { key: '', value: undefined }; } export const FlowConnectionConfigs_TableNameSchemaMappingEntry = { encode( message: FlowConnectionConfigs_TableNameSchemaMappingEntry, - writer: _m0.Writer = _m0.Writer.create(), + writer: _m0.Writer = _m0.Writer.create() ): _m0.Writer { - if (message.key !== "") { + if (message.key !== '') { writer.uint32(10).string(message.key); } if (message.value !== undefined) { @@ -1055,10 +1171,15 @@ export const FlowConnectionConfigs_TableNameSchemaMappingEntry = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): FlowConnectionConfigs_TableNameSchemaMappingEntry { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): FlowConnectionConfigs_TableNameSchemaMappingEntry { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseFlowConnectionConfigs_TableNameSchemaMappingEntry(); + const message = + createBaseFlowConnectionConfigs_TableNameSchemaMappingEntry(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { @@ -1087,14 +1208,16 @@ export const FlowConnectionConfigs_TableNameSchemaMappingEntry = { fromJSON(object: any): FlowConnectionConfigs_TableNameSchemaMappingEntry { return { - key: isSet(object.key) ? String(object.key) : "", - value: isSet(object.value) ? TableSchema.fromJSON(object.value) : undefined, + key: isSet(object.key) ? String(object.key) : '', + value: isSet(object.value) + ? 
TableSchema.fromJSON(object.value) + : undefined, }; }, toJSON(message: FlowConnectionConfigs_TableNameSchemaMappingEntry): unknown { const obj: any = {}; - if (message.key !== "") { + if (message.key !== '') { obj.key = message.key; } if (message.value !== undefined) { @@ -1103,19 +1226,29 @@ export const FlowConnectionConfigs_TableNameSchemaMappingEntry = { return obj; }, - create, I>>( - base?: I, - ): FlowConnectionConfigs_TableNameSchemaMappingEntry { - return FlowConnectionConfigs_TableNameSchemaMappingEntry.fromPartial(base ?? ({} as any)); + create< + I extends Exact< + DeepPartial, + I + >, + >(base?: I): FlowConnectionConfigs_TableNameSchemaMappingEntry { + return FlowConnectionConfigs_TableNameSchemaMappingEntry.fromPartial( + base ?? ({} as any) + ); }, - fromPartial, I>>( - object: I, - ): FlowConnectionConfigs_TableNameSchemaMappingEntry { - const message = createBaseFlowConnectionConfigs_TableNameSchemaMappingEntry(); - message.key = object.key ?? ""; - message.value = (object.value !== undefined && object.value !== null) - ? TableSchema.fromPartial(object.value) - : undefined; + fromPartial< + I extends Exact< + DeepPartial, + I + >, + >(object: I): FlowConnectionConfigs_TableNameSchemaMappingEntry { + const message = + createBaseFlowConnectionConfigs_TableNameSchemaMappingEntry(); + message.key = object.key ?? ''; + message.value = + object.value !== undefined && object.value !== null + ? TableSchema.fromPartial(object.value) + : undefined; return message; }, }; @@ -1125,7 +1258,10 @@ function createBaseSyncFlowOptions(): SyncFlowOptions { } export const SyncFlowOptions = { - encode(message: SyncFlowOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: SyncFlowOptions, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.batchSize !== 0) { writer.uint32(8).int32(message.batchSize); } @@ -1133,7 +1269,8 @@ export const SyncFlowOptions = { }, decode(input: _m0.Reader | Uint8Array, length?: number): SyncFlowOptions { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseSyncFlowOptions(); while (reader.pos < end) { @@ -1156,7 +1293,9 @@ export const SyncFlowOptions = { }, fromJSON(object: any): SyncFlowOptions { - return { batchSize: isSet(object.batchSize) ? Number(object.batchSize) : 0 }; + return { + batchSize: isSet(object.batchSize) ? Number(object.batchSize) : 0, + }; }, toJSON(message: SyncFlowOptions): unknown { @@ -1167,10 +1306,14 @@ export const SyncFlowOptions = { return obj; }, - create, I>>(base?: I): SyncFlowOptions { + create, I>>( + base?: I + ): SyncFlowOptions { return SyncFlowOptions.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): SyncFlowOptions { + fromPartial, I>>( + object: I + ): SyncFlowOptions { const message = createBaseSyncFlowOptions(); message.batchSize = object.batchSize ?? 
0; return message; @@ -1182,15 +1325,22 @@ function createBaseNormalizeFlowOptions(): NormalizeFlowOptions { } export const NormalizeFlowOptions = { - encode(message: NormalizeFlowOptions, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: NormalizeFlowOptions, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.batchSize !== 0) { writer.uint32(8).int32(message.batchSize); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): NormalizeFlowOptions { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): NormalizeFlowOptions { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseNormalizeFlowOptions(); while (reader.pos < end) { @@ -1213,7 +1363,9 @@ export const NormalizeFlowOptions = { }, fromJSON(object: any): NormalizeFlowOptions { - return { batchSize: isSet(object.batchSize) ? Number(object.batchSize) : 0 }; + return { + batchSize: isSet(object.batchSize) ? Number(object.batchSize) : 0, + }; }, toJSON(message: NormalizeFlowOptions): unknown { @@ -1224,10 +1376,14 @@ export const NormalizeFlowOptions = { return obj; }, - create, I>>(base?: I): NormalizeFlowOptions { + create, I>>( + base?: I + ): NormalizeFlowOptions { return NormalizeFlowOptions.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): NormalizeFlowOptions { + fromPartial, I>>( + object: I + ): NormalizeFlowOptions { const message = createBaseNormalizeFlowOptions(); message.batchSize = object.batchSize ?? 0; return message; @@ -1239,18 +1395,25 @@ function createBaseLastSyncState(): LastSyncState { } export const LastSyncState = { - encode(message: LastSyncState, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: LastSyncState, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.checkpoint !== 0) { writer.uint32(8).int64(message.checkpoint); } if (message.lastSyncedAt !== undefined) { - Timestamp.encode(toTimestamp(message.lastSyncedAt), writer.uint32(18).fork()).ldelim(); + Timestamp.encode( + toTimestamp(message.lastSyncedAt), + writer.uint32(18).fork() + ).ldelim(); } return writer; }, decode(input: _m0.Reader | Uint8Array, length?: number): LastSyncState { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseLastSyncState(); while (reader.pos < end) { @@ -1268,7 +1431,9 @@ export const LastSyncState = { break; } - message.lastSyncedAt = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + message.lastSyncedAt = fromTimestamp( + Timestamp.decode(reader, reader.uint32()) + ); continue; } if ((tag & 7) === 4 || tag === 0) { @@ -1282,7 +1447,9 @@ export const LastSyncState = { fromJSON(object: any): LastSyncState { return { checkpoint: isSet(object.checkpoint) ? Number(object.checkpoint) : 0, - lastSyncedAt: isSet(object.lastSyncedAt) ? fromJsonTimestamp(object.lastSyncedAt) : undefined, + lastSyncedAt: isSet(object.lastSyncedAt) + ? 
fromJsonTimestamp(object.lastSyncedAt) + : undefined, }; }, @@ -1297,10 +1464,14 @@ export const LastSyncState = { return obj; }, - create, I>>(base?: I): LastSyncState { + create, I>>( + base?: I + ): LastSyncState { return LastSyncState.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): LastSyncState { + fromPartial, I>>( + object: I + ): LastSyncState { const message = createBaseLastSyncState(); message.checkpoint = object.checkpoint ?? 0; message.lastSyncedAt = object.lastSyncedAt ?? undefined; @@ -1309,25 +1480,42 @@ export const LastSyncState = { }; function createBaseStartFlowInput(): StartFlowInput { - return { lastSyncState: undefined, flowConnectionConfigs: undefined, syncFlowOptions: undefined }; + return { + lastSyncState: undefined, + flowConnectionConfigs: undefined, + syncFlowOptions: undefined, + }; } export const StartFlowInput = { - encode(message: StartFlowInput, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: StartFlowInput, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.lastSyncState !== undefined) { - LastSyncState.encode(message.lastSyncState, writer.uint32(10).fork()).ldelim(); + LastSyncState.encode( + message.lastSyncState, + writer.uint32(10).fork() + ).ldelim(); } if (message.flowConnectionConfigs !== undefined) { - FlowConnectionConfigs.encode(message.flowConnectionConfigs, writer.uint32(18).fork()).ldelim(); + FlowConnectionConfigs.encode( + message.flowConnectionConfigs, + writer.uint32(18).fork() + ).ldelim(); } if (message.syncFlowOptions !== undefined) { - SyncFlowOptions.encode(message.syncFlowOptions, writer.uint32(26).fork()).ldelim(); + SyncFlowOptions.encode( + message.syncFlowOptions, + writer.uint32(26).fork() + ).ldelim(); } return writer; }, decode(input: _m0.Reader | Uint8Array, length?: number): StartFlowInput { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseStartFlowInput(); while (reader.pos < end) { @@ -1345,14 +1533,20 @@ export const StartFlowInput = { break; } - message.flowConnectionConfigs = FlowConnectionConfigs.decode(reader, reader.uint32()); + message.flowConnectionConfigs = FlowConnectionConfigs.decode( + reader, + reader.uint32() + ); continue; case 3: if (tag !== 26) { break; } - message.syncFlowOptions = SyncFlowOptions.decode(reader, reader.uint32()); + message.syncFlowOptions = SyncFlowOptions.decode( + reader, + reader.uint32() + ); continue; } if ((tag & 7) === 4 || tag === 0) { @@ -1365,11 +1559,15 @@ export const StartFlowInput = { fromJSON(object: any): StartFlowInput { return { - lastSyncState: isSet(object.lastSyncState) ? LastSyncState.fromJSON(object.lastSyncState) : undefined, + lastSyncState: isSet(object.lastSyncState) + ? LastSyncState.fromJSON(object.lastSyncState) + : undefined, flowConnectionConfigs: isSet(object.flowConnectionConfigs) ? FlowConnectionConfigs.fromJSON(object.flowConnectionConfigs) : undefined, - syncFlowOptions: isSet(object.syncFlowOptions) ? SyncFlowOptions.fromJSON(object.syncFlowOptions) : undefined, + syncFlowOptions: isSet(object.syncFlowOptions) + ? 
SyncFlowOptions.fromJSON(object.syncFlowOptions) + : undefined, }; }, @@ -1379,7 +1577,9 @@ export const StartFlowInput = { obj.lastSyncState = LastSyncState.toJSON(message.lastSyncState); } if (message.flowConnectionConfigs !== undefined) { - obj.flowConnectionConfigs = FlowConnectionConfigs.toJSON(message.flowConnectionConfigs); + obj.flowConnectionConfigs = FlowConnectionConfigs.toJSON( + message.flowConnectionConfigs + ); } if (message.syncFlowOptions !== undefined) { obj.syncFlowOptions = SyncFlowOptions.toJSON(message.syncFlowOptions); @@ -1387,21 +1587,28 @@ export const StartFlowInput = { return obj; }, - create, I>>(base?: I): StartFlowInput { + create, I>>( + base?: I + ): StartFlowInput { return StartFlowInput.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): StartFlowInput { + fromPartial, I>>( + object: I + ): StartFlowInput { const message = createBaseStartFlowInput(); - message.lastSyncState = (object.lastSyncState !== undefined && object.lastSyncState !== null) - ? LastSyncState.fromPartial(object.lastSyncState) - : undefined; + message.lastSyncState = + object.lastSyncState !== undefined && object.lastSyncState !== null + ? LastSyncState.fromPartial(object.lastSyncState) + : undefined; message.flowConnectionConfigs = - (object.flowConnectionConfigs !== undefined && object.flowConnectionConfigs !== null) + object.flowConnectionConfigs !== undefined && + object.flowConnectionConfigs !== null ? FlowConnectionConfigs.fromPartial(object.flowConnectionConfigs) : undefined; - message.syncFlowOptions = (object.syncFlowOptions !== undefined && object.syncFlowOptions !== null) - ? SyncFlowOptions.fromPartial(object.syncFlowOptions) - : undefined; + message.syncFlowOptions = + object.syncFlowOptions !== undefined && object.syncFlowOptions !== null + ? SyncFlowOptions.fromPartial(object.syncFlowOptions) + : undefined; return message; }, }; @@ -1411,15 +1618,22 @@ function createBaseStartNormalizeInput(): StartNormalizeInput { } export const StartNormalizeInput = { - encode(message: StartNormalizeInput, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: StartNormalizeInput, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.flowConnectionConfigs !== undefined) { - FlowConnectionConfigs.encode(message.flowConnectionConfigs, writer.uint32(10).fork()).ldelim(); + FlowConnectionConfigs.encode( + message.flowConnectionConfigs, + writer.uint32(10).fork() + ).ldelim(); } return writer; }, decode(input: _m0.Reader | Uint8Array, length?: number): StartNormalizeInput { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseStartNormalizeInput(); while (reader.pos < end) { @@ -1430,7 +1644,10 @@ export const StartNormalizeInput = { break; } - message.flowConnectionConfigs = FlowConnectionConfigs.decode(reader, reader.uint32()); + message.flowConnectionConfigs = FlowConnectionConfigs.decode( + reader, + reader.uint32() + ); continue; } if ((tag & 7) === 4 || tag === 0) { @@ -1452,18 +1669,25 @@ export const StartNormalizeInput = { toJSON(message: StartNormalizeInput): unknown { const obj: any = {}; if (message.flowConnectionConfigs !== undefined) { - obj.flowConnectionConfigs = FlowConnectionConfigs.toJSON(message.flowConnectionConfigs); + obj.flowConnectionConfigs = FlowConnectionConfigs.toJSON( + message.flowConnectionConfigs + ); } return obj; }, - create, I>>(base?: I): StartNormalizeInput { + create, I>>( + base?: I + ): StartNormalizeInput { return StartNormalizeInput.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): StartNormalizeInput { + fromPartial, I>>( + object: I + ): StartNormalizeInput { const message = createBaseStartNormalizeInput(); message.flowConnectionConfigs = - (object.flowConnectionConfigs !== undefined && object.flowConnectionConfigs !== null) + object.flowConnectionConfigs !== undefined && + object.flowConnectionConfigs !== null ? FlowConnectionConfigs.fromPartial(object.flowConnectionConfigs) : undefined; return message; @@ -1471,22 +1695,32 @@ export const StartNormalizeInput = { }; function createBaseGetLastSyncedIDInput(): GetLastSyncedIDInput { - return { peerConnectionConfig: undefined, flowJobName: "" }; + return { peerConnectionConfig: undefined, flowJobName: '' }; } export const GetLastSyncedIDInput = { - encode(message: GetLastSyncedIDInput, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: GetLastSyncedIDInput, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.peerConnectionConfig !== undefined) { - Peer.encode(message.peerConnectionConfig, writer.uint32(10).fork()).ldelim(); + Peer.encode( + message.peerConnectionConfig, + writer.uint32(10).fork() + ).ldelim(); } - if (message.flowJobName !== "") { + if (message.flowJobName !== '') { writer.uint32(18).string(message.flowJobName); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): GetLastSyncedIDInput { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): GetLastSyncedIDInput { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseGetLastSyncedIDInput(); while (reader.pos < end) { @@ -1517,8 +1751,10 @@ export const GetLastSyncedIDInput = { fromJSON(object: any): GetLastSyncedIDInput { return { - peerConnectionConfig: isSet(object.peerConnectionConfig) ? Peer.fromJSON(object.peerConnectionConfig) : undefined, - flowJobName: isSet(object.flowJobName) ? String(object.flowJobName) : "", + peerConnectionConfig: isSet(object.peerConnectionConfig) + ? Peer.fromJSON(object.peerConnectionConfig) + : undefined, + flowJobName: isSet(object.flowJobName) ? 
String(object.flowJobName) : '', }; }, @@ -1527,45 +1763,65 @@ export const GetLastSyncedIDInput = { if (message.peerConnectionConfig !== undefined) { obj.peerConnectionConfig = Peer.toJSON(message.peerConnectionConfig); } - if (message.flowJobName !== "") { + if (message.flowJobName !== '') { obj.flowJobName = message.flowJobName; } return obj; }, - create, I>>(base?: I): GetLastSyncedIDInput { + create, I>>( + base?: I + ): GetLastSyncedIDInput { return GetLastSyncedIDInput.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): GetLastSyncedIDInput { + fromPartial, I>>( + object: I + ): GetLastSyncedIDInput { const message = createBaseGetLastSyncedIDInput(); - message.peerConnectionConfig = (object.peerConnectionConfig !== undefined && object.peerConnectionConfig !== null) - ? Peer.fromPartial(object.peerConnectionConfig) - : undefined; - message.flowJobName = object.flowJobName ?? ""; + message.peerConnectionConfig = + object.peerConnectionConfig !== undefined && + object.peerConnectionConfig !== null + ? Peer.fromPartial(object.peerConnectionConfig) + : undefined; + message.flowJobName = object.flowJobName ?? ''; return message; }, }; function createBaseEnsurePullabilityInput(): EnsurePullabilityInput { - return { peerConnectionConfig: undefined, flowJobName: "", sourceTableIdentifier: "" }; + return { + peerConnectionConfig: undefined, + flowJobName: '', + sourceTableIdentifier: '', + }; } export const EnsurePullabilityInput = { - encode(message: EnsurePullabilityInput, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: EnsurePullabilityInput, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.peerConnectionConfig !== undefined) { - Peer.encode(message.peerConnectionConfig, writer.uint32(10).fork()).ldelim(); + Peer.encode( + message.peerConnectionConfig, + writer.uint32(10).fork() + ).ldelim(); } - if (message.flowJobName !== "") { + if (message.flowJobName !== '') { writer.uint32(18).string(message.flowJobName); } - if (message.sourceTableIdentifier !== "") { + if (message.sourceTableIdentifier !== '') { writer.uint32(26).string(message.sourceTableIdentifier); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): EnsurePullabilityInput { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): EnsurePullabilityInput { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseEnsurePullabilityInput(); while (reader.pos < end) { @@ -1603,9 +1859,13 @@ export const EnsurePullabilityInput = { fromJSON(object: any): EnsurePullabilityInput { return { - peerConnectionConfig: isSet(object.peerConnectionConfig) ? Peer.fromJSON(object.peerConnectionConfig) : undefined, - flowJobName: isSet(object.flowJobName) ? String(object.flowJobName) : "", - sourceTableIdentifier: isSet(object.sourceTableIdentifier) ? String(object.sourceTableIdentifier) : "", + peerConnectionConfig: isSet(object.peerConnectionConfig) + ? Peer.fromJSON(object.peerConnectionConfig) + : undefined, + flowJobName: isSet(object.flowJobName) ? String(object.flowJobName) : '', + sourceTableIdentifier: isSet(object.sourceTableIdentifier) + ? 
String(object.sourceTableIdentifier) + : '', }; }, @@ -1614,39 +1874,55 @@ export const EnsurePullabilityInput = { if (message.peerConnectionConfig !== undefined) { obj.peerConnectionConfig = Peer.toJSON(message.peerConnectionConfig); } - if (message.flowJobName !== "") { + if (message.flowJobName !== '') { obj.flowJobName = message.flowJobName; } - if (message.sourceTableIdentifier !== "") { + if (message.sourceTableIdentifier !== '') { obj.sourceTableIdentifier = message.sourceTableIdentifier; } return obj; }, - create, I>>(base?: I): EnsurePullabilityInput { + create, I>>( + base?: I + ): EnsurePullabilityInput { return EnsurePullabilityInput.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): EnsurePullabilityInput { + fromPartial, I>>( + object: I + ): EnsurePullabilityInput { const message = createBaseEnsurePullabilityInput(); - message.peerConnectionConfig = (object.peerConnectionConfig !== undefined && object.peerConnectionConfig !== null) - ? Peer.fromPartial(object.peerConnectionConfig) - : undefined; - message.flowJobName = object.flowJobName ?? ""; - message.sourceTableIdentifier = object.sourceTableIdentifier ?? ""; + message.peerConnectionConfig = + object.peerConnectionConfig !== undefined && + object.peerConnectionConfig !== null + ? Peer.fromPartial(object.peerConnectionConfig) + : undefined; + message.flowJobName = object.flowJobName ?? ''; + message.sourceTableIdentifier = object.sourceTableIdentifier ?? ''; return message; }, }; function createBaseEnsurePullabilityBatchInput(): EnsurePullabilityBatchInput { - return { peerConnectionConfig: undefined, flowJobName: "", sourceTableIdentifiers: [] }; + return { + peerConnectionConfig: undefined, + flowJobName: '', + sourceTableIdentifiers: [], + }; } export const EnsurePullabilityBatchInput = { - encode(message: EnsurePullabilityBatchInput, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: EnsurePullabilityBatchInput, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.peerConnectionConfig !== undefined) { - Peer.encode(message.peerConnectionConfig, writer.uint32(10).fork()).ldelim(); + Peer.encode( + message.peerConnectionConfig, + writer.uint32(10).fork() + ).ldelim(); } - if (message.flowJobName !== "") { + if (message.flowJobName !== '') { writer.uint32(18).string(message.flowJobName); } for (const v of message.sourceTableIdentifiers) { @@ -1655,8 +1931,12 @@ export const EnsurePullabilityBatchInput = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): EnsurePullabilityBatchInput { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): EnsurePullabilityBatchInput { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseEnsurePullabilityBatchInput(); while (reader.pos < end) { @@ -1694,8 +1974,10 @@ export const EnsurePullabilityBatchInput = { fromJSON(object: any): EnsurePullabilityBatchInput { return { - peerConnectionConfig: isSet(object.peerConnectionConfig) ? Peer.fromJSON(object.peerConnectionConfig) : undefined, - flowJobName: isSet(object.flowJobName) ? String(object.flowJobName) : "", + peerConnectionConfig: isSet(object.peerConnectionConfig) + ? Peer.fromJSON(object.peerConnectionConfig) + : undefined, + flowJobName: isSet(object.flowJobName) ? 
String(object.flowJobName) : '', sourceTableIdentifiers: Array.isArray(object?.sourceTableIdentifiers) ? object.sourceTableIdentifiers.map((e: any) => String(e)) : [], @@ -1707,7 +1989,7 @@ export const EnsurePullabilityBatchInput = { if (message.peerConnectionConfig !== undefined) { obj.peerConnectionConfig = Peer.toJSON(message.peerConnectionConfig); } - if (message.flowJobName !== "") { + if (message.flowJobName !== '') { obj.flowJobName = message.flowJobName; } if (message.sourceTableIdentifiers?.length) { @@ -1716,16 +1998,23 @@ export const EnsurePullabilityBatchInput = { return obj; }, - create, I>>(base?: I): EnsurePullabilityBatchInput { + create, I>>( + base?: I + ): EnsurePullabilityBatchInput { return EnsurePullabilityBatchInput.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): EnsurePullabilityBatchInput { + fromPartial, I>>( + object: I + ): EnsurePullabilityBatchInput { const message = createBaseEnsurePullabilityBatchInput(); - message.peerConnectionConfig = (object.peerConnectionConfig !== undefined && object.peerConnectionConfig !== null) - ? Peer.fromPartial(object.peerConnectionConfig) - : undefined; - message.flowJobName = object.flowJobName ?? ""; - message.sourceTableIdentifiers = object.sourceTableIdentifiers?.map((e) => e) || []; + message.peerConnectionConfig = + object.peerConnectionConfig !== undefined && + object.peerConnectionConfig !== null + ? Peer.fromPartial(object.peerConnectionConfig) + : undefined; + message.flowJobName = object.flowJobName ?? ''; + message.sourceTableIdentifiers = + object.sourceTableIdentifiers?.map((e) => e) || []; return message; }, }; @@ -1735,15 +2024,22 @@ function createBasePostgresTableIdentifier(): PostgresTableIdentifier { } export const PostgresTableIdentifier = { - encode(message: PostgresTableIdentifier, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: PostgresTableIdentifier, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.relId !== 0) { writer.uint32(8).uint32(message.relId); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): PostgresTableIdentifier { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): PostgresTableIdentifier { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBasePostgresTableIdentifier(); while (reader.pos < end) { @@ -1777,10 +2073,14 @@ export const PostgresTableIdentifier = { return obj; }, - create, I>>(base?: I): PostgresTableIdentifier { + create, I>>( + base?: I + ): PostgresTableIdentifier { return PostgresTableIdentifier.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): PostgresTableIdentifier { + fromPartial, I>>( + object: I + ): PostgresTableIdentifier { const message = createBasePostgresTableIdentifier(); message.relId = object.relId ?? 
0; return message; @@ -1792,15 +2092,22 @@ function createBaseTableIdentifier(): TableIdentifier { } export const TableIdentifier = { - encode(message: TableIdentifier, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: TableIdentifier, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.postgresTableIdentifier !== undefined) { - PostgresTableIdentifier.encode(message.postgresTableIdentifier, writer.uint32(10).fork()).ldelim(); + PostgresTableIdentifier.encode( + message.postgresTableIdentifier, + writer.uint32(10).fork() + ).ldelim(); } return writer; }, decode(input: _m0.Reader | Uint8Array, length?: number): TableIdentifier { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseTableIdentifier(); while (reader.pos < end) { @@ -1811,7 +2118,10 @@ export const TableIdentifier = { break; } - message.postgresTableIdentifier = PostgresTableIdentifier.decode(reader, reader.uint32()); + message.postgresTableIdentifier = PostgresTableIdentifier.decode( + reader, + reader.uint32() + ); continue; } if ((tag & 7) === 4 || tag === 0) { @@ -1833,18 +2143,25 @@ export const TableIdentifier = { toJSON(message: TableIdentifier): unknown { const obj: any = {}; if (message.postgresTableIdentifier !== undefined) { - obj.postgresTableIdentifier = PostgresTableIdentifier.toJSON(message.postgresTableIdentifier); + obj.postgresTableIdentifier = PostgresTableIdentifier.toJSON( + message.postgresTableIdentifier + ); } return obj; }, - create, I>>(base?: I): TableIdentifier { + create, I>>( + base?: I + ): TableIdentifier { return TableIdentifier.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): TableIdentifier { + fromPartial, I>>( + object: I + ): TableIdentifier { const message = createBaseTableIdentifier(); message.postgresTableIdentifier = - (object.postgresTableIdentifier !== undefined && object.postgresTableIdentifier !== null) + object.postgresTableIdentifier !== undefined && + object.postgresTableIdentifier !== null ? PostgresTableIdentifier.fromPartial(object.postgresTableIdentifier) : undefined; return message; @@ -1856,15 +2173,25 @@ function createBaseEnsurePullabilityOutput(): EnsurePullabilityOutput { } export const EnsurePullabilityOutput = { - encode(message: EnsurePullabilityOutput, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: EnsurePullabilityOutput, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.tableIdentifier !== undefined) { - TableIdentifier.encode(message.tableIdentifier, writer.uint32(10).fork()).ldelim(); + TableIdentifier.encode( + message.tableIdentifier, + writer.uint32(10).fork() + ).ldelim(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): EnsurePullabilityOutput { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): EnsurePullabilityOutput { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseEnsurePullabilityOutput(); while (reader.pos < end) { @@ -1875,7 +2202,10 @@ export const EnsurePullabilityOutput = { break; } - message.tableIdentifier = TableIdentifier.decode(reader, reader.uint32()); + message.tableIdentifier = TableIdentifier.decode( + reader, + reader.uint32() + ); continue; } if ((tag & 7) === 4 || tag === 0) { @@ -1888,7 +2218,9 @@ export const EnsurePullabilityOutput = { fromJSON(object: any): EnsurePullabilityOutput { return { - tableIdentifier: isSet(object.tableIdentifier) ? TableIdentifier.fromJSON(object.tableIdentifier) : undefined, + tableIdentifier: isSet(object.tableIdentifier) + ? TableIdentifier.fromJSON(object.tableIdentifier) + : undefined, }; }, @@ -1900,14 +2232,19 @@ export const EnsurePullabilityOutput = { return obj; }, - create, I>>(base?: I): EnsurePullabilityOutput { + create, I>>( + base?: I + ): EnsurePullabilityOutput { return EnsurePullabilityOutput.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): EnsurePullabilityOutput { + fromPartial, I>>( + object: I + ): EnsurePullabilityOutput { const message = createBaseEnsurePullabilityOutput(); - message.tableIdentifier = (object.tableIdentifier !== undefined && object.tableIdentifier !== null) - ? TableIdentifier.fromPartial(object.tableIdentifier) - : undefined; + message.tableIdentifier = + object.tableIdentifier !== undefined && object.tableIdentifier !== null + ? TableIdentifier.fromPartial(object.tableIdentifier) + : undefined; return message; }, }; @@ -1917,18 +2254,25 @@ function createBaseEnsurePullabilityBatchOutput(): EnsurePullabilityBatchOutput } export const EnsurePullabilityBatchOutput = { - encode(message: EnsurePullabilityBatchOutput, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: EnsurePullabilityBatchOutput, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { Object.entries(message.tableIdentifierMapping).forEach(([key, value]) => { EnsurePullabilityBatchOutput_TableIdentifierMappingEntry.encode( { key: key as any, value }, - writer.uint32(10).fork(), + writer.uint32(10).fork() ).ldelim(); }); return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): EnsurePullabilityBatchOutput { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): EnsurePullabilityBatchOutput { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseEnsurePullabilityBatchOutput(); while (reader.pos < end) { @@ -1939,7 +2283,11 @@ export const EnsurePullabilityBatchOutput = { break; } - const entry1 = EnsurePullabilityBatchOutput_TableIdentifierMappingEntry.decode(reader, reader.uint32()); + const entry1 = + EnsurePullabilityBatchOutput_TableIdentifierMappingEntry.decode( + reader, + reader.uint32() + ); if (entry1.value !== undefined) { message.tableIdentifierMapping[entry1.key] = entry1.value; } @@ -1956,13 +2304,12 @@ export const EnsurePullabilityBatchOutput = { fromJSON(object: any): EnsurePullabilityBatchOutput { return { tableIdentifierMapping: isObject(object.tableIdentifierMapping) - ? Object.entries(object.tableIdentifierMapping).reduce<{ [key: string]: TableIdentifier }>( - (acc, [key, value]) => { + ? 
Object.entries(object.tableIdentifierMapping).reduce<{ + [key: string]: TableIdentifier; + }>((acc, [key, value]) => { acc[key] = TableIdentifier.fromJSON(value); return acc; - }, - {}, - ) + }, {}) : {}, }; }, @@ -1981,14 +2328,18 @@ export const EnsurePullabilityBatchOutput = { return obj; }, - create, I>>(base?: I): EnsurePullabilityBatchOutput { + create, I>>( + base?: I + ): EnsurePullabilityBatchOutput { return EnsurePullabilityBatchOutput.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): EnsurePullabilityBatchOutput { + fromPartial, I>>( + object: I + ): EnsurePullabilityBatchOutput { const message = createBaseEnsurePullabilityBatchOutput(); - message.tableIdentifierMapping = Object.entries(object.tableIdentifierMapping ?? {}).reduce< - { [key: string]: TableIdentifier } - >((acc, [key, value]) => { + message.tableIdentifierMapping = Object.entries( + object.tableIdentifierMapping ?? {} + ).reduce<{ [key: string]: TableIdentifier }>((acc, [key, value]) => { if (value !== undefined) { acc[key] = TableIdentifier.fromPartial(value); } @@ -1999,15 +2350,15 @@ export const EnsurePullabilityBatchOutput = { }; function createBaseEnsurePullabilityBatchOutput_TableIdentifierMappingEntry(): EnsurePullabilityBatchOutput_TableIdentifierMappingEntry { - return { key: "", value: undefined }; + return { key: '', value: undefined }; } export const EnsurePullabilityBatchOutput_TableIdentifierMappingEntry = { encode( message: EnsurePullabilityBatchOutput_TableIdentifierMappingEntry, - writer: _m0.Writer = _m0.Writer.create(), + writer: _m0.Writer = _m0.Writer.create() ): _m0.Writer { - if (message.key !== "") { + if (message.key !== '') { writer.uint32(10).string(message.key); } if (message.value !== undefined) { @@ -2016,10 +2367,15 @@ export const EnsurePullabilityBatchOutput_TableIdentifierMappingEntry = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): EnsurePullabilityBatchOutput_TableIdentifierMappingEntry { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): EnsurePullabilityBatchOutput_TableIdentifierMappingEntry { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseEnsurePullabilityBatchOutput_TableIdentifierMappingEntry(); + const message = + createBaseEnsurePullabilityBatchOutput_TableIdentifierMappingEntry(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { @@ -2046,16 +2402,22 @@ export const EnsurePullabilityBatchOutput_TableIdentifierMappingEntry = { return message; }, - fromJSON(object: any): EnsurePullabilityBatchOutput_TableIdentifierMappingEntry { + fromJSON( + object: any + ): EnsurePullabilityBatchOutput_TableIdentifierMappingEntry { return { - key: isSet(object.key) ? String(object.key) : "", - value: isSet(object.value) ? TableIdentifier.fromJSON(object.value) : undefined, + key: isSet(object.key) ? String(object.key) : '', + value: isSet(object.value) + ? 
TableIdentifier.fromJSON(object.value)
+        : undefined,
     };
   },

-  toJSON(message: EnsurePullabilityBatchOutput_TableIdentifierMappingEntry): unknown {
+  toJSON(
+    message: EnsurePullabilityBatchOutput_TableIdentifierMappingEntry
+  ): unknown {
     const obj: any = {};
-    if (message.key !== "") {
+    if (message.key !== '') {
       obj.key = message.key;
     }
     if (message.value !== undefined) {
@@ -2064,19 +2426,29 @@ export const EnsurePullabilityBatchOutput_TableIdentifierMappingEntry = {
     return obj;
   },

-  create<I extends Exact<DeepPartial<EnsurePullabilityBatchOutput_TableIdentifierMappingEntry>, I>>(
-    base?: I,
-  ): EnsurePullabilityBatchOutput_TableIdentifierMappingEntry {
-    return EnsurePullabilityBatchOutput_TableIdentifierMappingEntry.fromPartial(base ?? ({} as any));
+  create<
+    I extends Exact<
+      DeepPartial<EnsurePullabilityBatchOutput_TableIdentifierMappingEntry>,
+      I
+    >,
+  >(base?: I): EnsurePullabilityBatchOutput_TableIdentifierMappingEntry {
+    return EnsurePullabilityBatchOutput_TableIdentifierMappingEntry.fromPartial(
+      base ?? ({} as any)
+    );
   },
-  fromPartial<I extends Exact<DeepPartial<EnsurePullabilityBatchOutput_TableIdentifierMappingEntry>, I>>(
-    object: I,
-  ): EnsurePullabilityBatchOutput_TableIdentifierMappingEntry {
-    const message = createBaseEnsurePullabilityBatchOutput_TableIdentifierMappingEntry();
-    message.key = object.key ?? "";
-    message.value = (object.value !== undefined && object.value !== null)
-      ? TableIdentifier.fromPartial(object.value)
-      : undefined;
+  fromPartial<
+    I extends Exact<
+      DeepPartial<EnsurePullabilityBatchOutput_TableIdentifierMappingEntry>,
+      I
+    >,
+  >(object: I): EnsurePullabilityBatchOutput_TableIdentifierMappingEntry {
+    const message =
+      createBaseEnsurePullabilityBatchOutput_TableIdentifierMappingEntry();
+    message.key = object.key ?? '';
+    message.value =
+      object.value !== undefined && object.value !== null
+        ? TableIdentifier.fromPartial(object.value)
+        : undefined;
     return message;
   },
 };
@@ -2084,25 +2456,34 @@ export const EnsurePullabilityBatchOutput_TableIdentifierMappingEntry = {
 function createBaseSetupReplicationInput(): SetupReplicationInput {
   return {
     peerConnectionConfig: undefined,
-    flowJobName: "",
+    flowJobName: '',
     tableNameMapping: {},
     destinationPeer: undefined,
     doInitialCopy: false,
-    existingPublicationName: "",
-    existingReplicationSlotName: "",
+    existingPublicationName: '',
+    existingReplicationSlotName: '',
   };
 }

 export const SetupReplicationInput = {
-  encode(message: SetupReplicationInput, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer {
+  encode(
+    message: SetupReplicationInput,
+    writer: _m0.Writer = _m0.Writer.create()
+  ): _m0.Writer {
     if (message.peerConnectionConfig !== undefined) {
-      Peer.encode(message.peerConnectionConfig, writer.uint32(10).fork()).ldelim();
+      Peer.encode(
+        message.peerConnectionConfig,
+        writer.uint32(10).fork()
+      ).ldelim();
     }
-    if (message.flowJobName !== "") {
+    if (message.flowJobName !== '') {
       writer.uint32(18).string(message.flowJobName);
     }
     Object.entries(message.tableNameMapping).forEach(([key, value]) => {
-      SetupReplicationInput_TableNameMappingEntry.encode({ key: key as any, value }, writer.uint32(26).fork()).ldelim();
+      SetupReplicationInput_TableNameMappingEntry.encode(
+        { key: key as any, value },
+        writer.uint32(26).fork()
+      ).ldelim();
     });
     if (message.destinationPeer !== undefined) {
       Peer.encode(message.destinationPeer, writer.uint32(34).fork()).ldelim();
@@ -2110,17 +2491,21 @@ export const SetupReplicationInput = {
     if (message.doInitialCopy === true) {
       writer.uint32(40).bool(message.doInitialCopy);
     }
-    if (message.existingPublicationName !== "") {
+    if (message.existingPublicationName !== '') {
       writer.uint32(50).string(message.existingPublicationName);
     }
-    if (message.existingReplicationSlotName !== "") {
+    if (message.existingReplicationSlotName
!== '') { writer.uint32(58).string(message.existingReplicationSlotName); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SetupReplicationInput { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): SetupReplicationInput { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseSetupReplicationInput(); while (reader.pos < end) { @@ -2145,7 +2530,10 @@ export const SetupReplicationInput = { break; } - const entry3 = SetupReplicationInput_TableNameMappingEntry.decode(reader, reader.uint32()); + const entry3 = SetupReplicationInput_TableNameMappingEntry.decode( + reader, + reader.uint32() + ); if (entry3.value !== undefined) { message.tableNameMapping[entry3.key] = entry3.value; } @@ -2189,20 +2577,30 @@ export const SetupReplicationInput = { fromJSON(object: any): SetupReplicationInput { return { - peerConnectionConfig: isSet(object.peerConnectionConfig) ? Peer.fromJSON(object.peerConnectionConfig) : undefined, - flowJobName: isSet(object.flowJobName) ? String(object.flowJobName) : "", + peerConnectionConfig: isSet(object.peerConnectionConfig) + ? Peer.fromJSON(object.peerConnectionConfig) + : undefined, + flowJobName: isSet(object.flowJobName) ? String(object.flowJobName) : '', tableNameMapping: isObject(object.tableNameMapping) - ? Object.entries(object.tableNameMapping).reduce<{ [key: string]: string }>((acc, [key, value]) => { - acc[key] = String(value); - return acc; - }, {}) + ? Object.entries(object.tableNameMapping).reduce<{ + [key: string]: string; + }>((acc, [key, value]) => { + acc[key] = String(value); + return acc; + }, {}) : {}, - destinationPeer: isSet(object.destinationPeer) ? Peer.fromJSON(object.destinationPeer) : undefined, - doInitialCopy: isSet(object.doInitialCopy) ? Boolean(object.doInitialCopy) : false, - existingPublicationName: isSet(object.existingPublicationName) ? String(object.existingPublicationName) : "", + destinationPeer: isSet(object.destinationPeer) + ? Peer.fromJSON(object.destinationPeer) + : undefined, + doInitialCopy: isSet(object.doInitialCopy) + ? Boolean(object.doInitialCopy) + : false, + existingPublicationName: isSet(object.existingPublicationName) + ? String(object.existingPublicationName) + : '', existingReplicationSlotName: isSet(object.existingReplicationSlotName) ? String(object.existingReplicationSlotName) - : "", + : '', }; }, @@ -2211,7 +2609,7 @@ export const SetupReplicationInput = { if (message.peerConnectionConfig !== undefined) { obj.peerConnectionConfig = Peer.toJSON(message.peerConnectionConfig); } - if (message.flowJobName !== "") { + if (message.flowJobName !== '') { obj.flowJobName = message.flowJobName; } if (message.tableNameMapping) { @@ -2229,60 +2627,74 @@ export const SetupReplicationInput = { if (message.doInitialCopy === true) { obj.doInitialCopy = message.doInitialCopy; } - if (message.existingPublicationName !== "") { + if (message.existingPublicationName !== '') { obj.existingPublicationName = message.existingPublicationName; } - if (message.existingReplicationSlotName !== "") { + if (message.existingReplicationSlotName !== '') { obj.existingReplicationSlotName = message.existingReplicationSlotName; } return obj; }, - create, I>>(base?: I): SetupReplicationInput { + create, I>>( + base?: I + ): SetupReplicationInput { return SetupReplicationInput.fromPartial(base ?? 
({} as any)); }, - fromPartial, I>>(object: I): SetupReplicationInput { + fromPartial, I>>( + object: I + ): SetupReplicationInput { const message = createBaseSetupReplicationInput(); - message.peerConnectionConfig = (object.peerConnectionConfig !== undefined && object.peerConnectionConfig !== null) - ? Peer.fromPartial(object.peerConnectionConfig) - : undefined; - message.flowJobName = object.flowJobName ?? ""; - message.tableNameMapping = Object.entries(object.tableNameMapping ?? {}).reduce<{ [key: string]: string }>( - (acc, [key, value]) => { - if (value !== undefined) { - acc[key] = String(value); - } - return acc; - }, - {}, - ); - message.destinationPeer = (object.destinationPeer !== undefined && object.destinationPeer !== null) - ? Peer.fromPartial(object.destinationPeer) - : undefined; + message.peerConnectionConfig = + object.peerConnectionConfig !== undefined && + object.peerConnectionConfig !== null + ? Peer.fromPartial(object.peerConnectionConfig) + : undefined; + message.flowJobName = object.flowJobName ?? ''; + message.tableNameMapping = Object.entries( + object.tableNameMapping ?? {} + ).reduce<{ [key: string]: string }>((acc, [key, value]) => { + if (value !== undefined) { + acc[key] = String(value); + } + return acc; + }, {}); + message.destinationPeer = + object.destinationPeer !== undefined && object.destinationPeer !== null + ? Peer.fromPartial(object.destinationPeer) + : undefined; message.doInitialCopy = object.doInitialCopy ?? false; - message.existingPublicationName = object.existingPublicationName ?? ""; - message.existingReplicationSlotName = object.existingReplicationSlotName ?? ""; + message.existingPublicationName = object.existingPublicationName ?? ''; + message.existingReplicationSlotName = + object.existingReplicationSlotName ?? ''; return message; }, }; function createBaseSetupReplicationInput_TableNameMappingEntry(): SetupReplicationInput_TableNameMappingEntry { - return { key: "", value: "" }; + return { key: '', value: '' }; } export const SetupReplicationInput_TableNameMappingEntry = { - encode(message: SetupReplicationInput_TableNameMappingEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.key !== "") { + encode( + message: SetupReplicationInput_TableNameMappingEntry, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { + if (message.key !== '') { writer.uint32(10).string(message.key); } - if (message.value !== "") { + if (message.value !== '') { writer.uint32(18).string(message.value); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SetupReplicationInput_TableNameMappingEntry { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): SetupReplicationInput_TableNameMappingEntry { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseSetupReplicationInput_TableNameMappingEntry(); while (reader.pos < end) { @@ -2312,52 +2724,70 @@ export const SetupReplicationInput_TableNameMappingEntry = { }, fromJSON(object: any): SetupReplicationInput_TableNameMappingEntry { - return { key: isSet(object.key) ? String(object.key) : "", value: isSet(object.value) ? String(object.value) : "" }; + return { + key: isSet(object.key) ? String(object.key) : '', + value: isSet(object.value) ? 
String(object.value) : '', + }; }, toJSON(message: SetupReplicationInput_TableNameMappingEntry): unknown { const obj: any = {}; - if (message.key !== "") { + if (message.key !== '') { obj.key = message.key; } - if (message.value !== "") { + if (message.value !== '') { obj.value = message.value; } return obj; }, - create, I>>( - base?: I, - ): SetupReplicationInput_TableNameMappingEntry { - return SetupReplicationInput_TableNameMappingEntry.fromPartial(base ?? ({} as any)); + create< + I extends Exact< + DeepPartial, + I + >, + >(base?: I): SetupReplicationInput_TableNameMappingEntry { + return SetupReplicationInput_TableNameMappingEntry.fromPartial( + base ?? ({} as any) + ); }, - fromPartial, I>>( - object: I, - ): SetupReplicationInput_TableNameMappingEntry { + fromPartial< + I extends Exact< + DeepPartial, + I + >, + >(object: I): SetupReplicationInput_TableNameMappingEntry { const message = createBaseSetupReplicationInput_TableNameMappingEntry(); - message.key = object.key ?? ""; - message.value = object.value ?? ""; + message.key = object.key ?? ''; + message.value = object.value ?? ''; return message; }, }; function createBaseSetupReplicationOutput(): SetupReplicationOutput { - return { slotName: "", snapshotName: "" }; + return { slotName: '', snapshotName: '' }; } export const SetupReplicationOutput = { - encode(message: SetupReplicationOutput, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.slotName !== "") { + encode( + message: SetupReplicationOutput, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { + if (message.slotName !== '') { writer.uint32(10).string(message.slotName); } - if (message.snapshotName !== "") { + if (message.snapshotName !== '') { writer.uint32(18).string(message.snapshotName); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SetupReplicationOutput { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): SetupReplicationOutput { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseSetupReplicationOutput(); while (reader.pos < end) { @@ -2388,47 +2818,67 @@ export const SetupReplicationOutput = { fromJSON(object: any): SetupReplicationOutput { return { - slotName: isSet(object.slotName) ? String(object.slotName) : "", - snapshotName: isSet(object.snapshotName) ? String(object.snapshotName) : "", + slotName: isSet(object.slotName) ? String(object.slotName) : '', + snapshotName: isSet(object.snapshotName) + ? String(object.snapshotName) + : '', }; }, toJSON(message: SetupReplicationOutput): unknown { const obj: any = {}; - if (message.slotName !== "") { + if (message.slotName !== '') { obj.slotName = message.slotName; } - if (message.snapshotName !== "") { + if (message.snapshotName !== '') { obj.snapshotName = message.snapshotName; } return obj; }, - create, I>>(base?: I): SetupReplicationOutput { + create, I>>( + base?: I + ): SetupReplicationOutput { return SetupReplicationOutput.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): SetupReplicationOutput { + fromPartial, I>>( + object: I + ): SetupReplicationOutput { const message = createBaseSetupReplicationOutput(); - message.slotName = object.slotName ?? ""; - message.snapshotName = object.snapshotName ?? ""; + message.slotName = object.slotName ?? ''; + message.snapshotName = object.snapshotName ?? 
''; return message; }, }; function createBaseCreateRawTableInput(): CreateRawTableInput { - return { peerConnectionConfig: undefined, flowJobName: "", tableNameMapping: {}, cdcSyncMode: 0 }; + return { + peerConnectionConfig: undefined, + flowJobName: '', + tableNameMapping: {}, + cdcSyncMode: 0, + }; } export const CreateRawTableInput = { - encode(message: CreateRawTableInput, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: CreateRawTableInput, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.peerConnectionConfig !== undefined) { - Peer.encode(message.peerConnectionConfig, writer.uint32(10).fork()).ldelim(); + Peer.encode( + message.peerConnectionConfig, + writer.uint32(10).fork() + ).ldelim(); } - if (message.flowJobName !== "") { + if (message.flowJobName !== '') { writer.uint32(18).string(message.flowJobName); } Object.entries(message.tableNameMapping).forEach(([key, value]) => { - CreateRawTableInput_TableNameMappingEntry.encode({ key: key as any, value }, writer.uint32(26).fork()).ldelim(); + CreateRawTableInput_TableNameMappingEntry.encode( + { key: key as any, value }, + writer.uint32(26).fork() + ).ldelim(); }); if (message.cdcSyncMode !== 0) { writer.uint32(32).int32(message.cdcSyncMode); @@ -2437,7 +2887,8 @@ export const CreateRawTableInput = { }, decode(input: _m0.Reader | Uint8Array, length?: number): CreateRawTableInput { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseCreateRawTableInput(); while (reader.pos < end) { @@ -2462,7 +2913,10 @@ export const CreateRawTableInput = { break; } - const entry3 = CreateRawTableInput_TableNameMappingEntry.decode(reader, reader.uint32()); + const entry3 = CreateRawTableInput_TableNameMappingEntry.decode( + reader, + reader.uint32() + ); if (entry3.value !== undefined) { message.tableNameMapping[entry3.key] = entry3.value; } @@ -2485,15 +2939,21 @@ export const CreateRawTableInput = { fromJSON(object: any): CreateRawTableInput { return { - peerConnectionConfig: isSet(object.peerConnectionConfig) ? Peer.fromJSON(object.peerConnectionConfig) : undefined, - flowJobName: isSet(object.flowJobName) ? String(object.flowJobName) : "", + peerConnectionConfig: isSet(object.peerConnectionConfig) + ? Peer.fromJSON(object.peerConnectionConfig) + : undefined, + flowJobName: isSet(object.flowJobName) ? String(object.flowJobName) : '', tableNameMapping: isObject(object.tableNameMapping) - ? Object.entries(object.tableNameMapping).reduce<{ [key: string]: string }>((acc, [key, value]) => { - acc[key] = String(value); - return acc; - }, {}) + ? Object.entries(object.tableNameMapping).reduce<{ + [key: string]: string; + }>((acc, [key, value]) => { + acc[key] = String(value); + return acc; + }, {}) : {}, - cdcSyncMode: isSet(object.cdcSyncMode) ? qRepSyncModeFromJSON(object.cdcSyncMode) : 0, + cdcSyncMode: isSet(object.cdcSyncMode) + ? 
qRepSyncModeFromJSON(object.cdcSyncMode) + : 0, }; }, @@ -2502,7 +2962,7 @@ export const CreateRawTableInput = { if (message.peerConnectionConfig !== undefined) { obj.peerConnectionConfig = Peer.toJSON(message.peerConnectionConfig); } - if (message.flowJobName !== "") { + if (message.flowJobName !== '') { obj.flowJobName = message.flowJobName; } if (message.tableNameMapping) { @@ -2520,46 +2980,58 @@ export const CreateRawTableInput = { return obj; }, - create, I>>(base?: I): CreateRawTableInput { + create, I>>( + base?: I + ): CreateRawTableInput { return CreateRawTableInput.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): CreateRawTableInput { + fromPartial, I>>( + object: I + ): CreateRawTableInput { const message = createBaseCreateRawTableInput(); - message.peerConnectionConfig = (object.peerConnectionConfig !== undefined && object.peerConnectionConfig !== null) - ? Peer.fromPartial(object.peerConnectionConfig) - : undefined; - message.flowJobName = object.flowJobName ?? ""; - message.tableNameMapping = Object.entries(object.tableNameMapping ?? {}).reduce<{ [key: string]: string }>( - (acc, [key, value]) => { - if (value !== undefined) { - acc[key] = String(value); - } - return acc; - }, - {}, - ); + message.peerConnectionConfig = + object.peerConnectionConfig !== undefined && + object.peerConnectionConfig !== null + ? Peer.fromPartial(object.peerConnectionConfig) + : undefined; + message.flowJobName = object.flowJobName ?? ''; + message.tableNameMapping = Object.entries( + object.tableNameMapping ?? {} + ).reduce<{ [key: string]: string }>((acc, [key, value]) => { + if (value !== undefined) { + acc[key] = String(value); + } + return acc; + }, {}); message.cdcSyncMode = object.cdcSyncMode ?? 0; return message; }, }; function createBaseCreateRawTableInput_TableNameMappingEntry(): CreateRawTableInput_TableNameMappingEntry { - return { key: "", value: "" }; + return { key: '', value: '' }; } export const CreateRawTableInput_TableNameMappingEntry = { - encode(message: CreateRawTableInput_TableNameMappingEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.key !== "") { + encode( + message: CreateRawTableInput_TableNameMappingEntry, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { + if (message.key !== '') { writer.uint32(10).string(message.key); } - if (message.value !== "") { + if (message.value !== '') { writer.uint32(18).string(message.value); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): CreateRawTableInput_TableNameMappingEntry { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): CreateRawTableInput_TableNameMappingEntry { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseCreateRawTableInput_TableNameMappingEntry(); while (reader.pos < end) { @@ -2589,49 +3061,61 @@ export const CreateRawTableInput_TableNameMappingEntry = { }, fromJSON(object: any): CreateRawTableInput_TableNameMappingEntry { - return { key: isSet(object.key) ? String(object.key) : "", value: isSet(object.value) ? String(object.value) : "" }; + return { + key: isSet(object.key) ? String(object.key) : '', + value: isSet(object.value) ? 
String(object.value) : '', + }; }, toJSON(message: CreateRawTableInput_TableNameMappingEntry): unknown { const obj: any = {}; - if (message.key !== "") { + if (message.key !== '') { obj.key = message.key; } - if (message.value !== "") { + if (message.value !== '') { obj.value = message.value; } return obj; }, - create, I>>( - base?: I, - ): CreateRawTableInput_TableNameMappingEntry { - return CreateRawTableInput_TableNameMappingEntry.fromPartial(base ?? ({} as any)); + create< + I extends Exact, I>, + >(base?: I): CreateRawTableInput_TableNameMappingEntry { + return CreateRawTableInput_TableNameMappingEntry.fromPartial( + base ?? ({} as any) + ); }, - fromPartial, I>>( - object: I, - ): CreateRawTableInput_TableNameMappingEntry { + fromPartial< + I extends Exact, I>, + >(object: I): CreateRawTableInput_TableNameMappingEntry { const message = createBaseCreateRawTableInput_TableNameMappingEntry(); - message.key = object.key ?? ""; - message.value = object.value ?? ""; + message.key = object.key ?? ''; + message.value = object.value ?? ''; return message; }, }; function createBaseCreateRawTableOutput(): CreateRawTableOutput { - return { tableIdentifier: "" }; + return { tableIdentifier: '' }; } export const CreateRawTableOutput = { - encode(message: CreateRawTableOutput, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.tableIdentifier !== "") { + encode( + message: CreateRawTableOutput, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { + if (message.tableIdentifier !== '') { writer.uint32(10).string(message.tableIdentifier); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): CreateRawTableOutput { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): CreateRawTableOutput { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseCreateRawTableOutput(); while (reader.pos < end) { @@ -2654,47 +3138,62 @@ export const CreateRawTableOutput = { }, fromJSON(object: any): CreateRawTableOutput { - return { tableIdentifier: isSet(object.tableIdentifier) ? String(object.tableIdentifier) : "" }; + return { + tableIdentifier: isSet(object.tableIdentifier) + ? String(object.tableIdentifier) + : '', + }; }, toJSON(message: CreateRawTableOutput): unknown { const obj: any = {}; - if (message.tableIdentifier !== "") { + if (message.tableIdentifier !== '') { obj.tableIdentifier = message.tableIdentifier; } return obj; }, - create, I>>(base?: I): CreateRawTableOutput { + create, I>>( + base?: I + ): CreateRawTableOutput { return CreateRawTableOutput.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): CreateRawTableOutput { + fromPartial, I>>( + object: I + ): CreateRawTableOutput { const message = createBaseCreateRawTableOutput(); - message.tableIdentifier = object.tableIdentifier ?? ""; + message.tableIdentifier = object.tableIdentifier ?? 
''; return message; }, }; function createBaseTableSchema(): TableSchema { - return { tableIdentifier: "", columns: {}, primaryKeyColumn: "" }; + return { tableIdentifier: '', columns: {}, primaryKeyColumn: '' }; } export const TableSchema = { - encode(message: TableSchema, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.tableIdentifier !== "") { + encode( + message: TableSchema, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { + if (message.tableIdentifier !== '') { writer.uint32(10).string(message.tableIdentifier); } Object.entries(message.columns).forEach(([key, value]) => { - TableSchema_ColumnsEntry.encode({ key: key as any, value }, writer.uint32(18).fork()).ldelim(); + TableSchema_ColumnsEntry.encode( + { key: key as any, value }, + writer.uint32(18).fork() + ).ldelim(); }); - if (message.primaryKeyColumn !== "") { + if (message.primaryKeyColumn !== '') { writer.uint32(26).string(message.primaryKeyColumn); } return writer; }, decode(input: _m0.Reader | Uint8Array, length?: number): TableSchema { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseTableSchema(); while (reader.pos < end) { @@ -2712,7 +3211,10 @@ export const TableSchema = { break; } - const entry2 = TableSchema_ColumnsEntry.decode(reader, reader.uint32()); + const entry2 = TableSchema_ColumnsEntry.decode( + reader, + reader.uint32() + ); if (entry2.value !== undefined) { message.columns[entry2.key] = entry2.value; } @@ -2735,20 +3237,27 @@ export const TableSchema = { fromJSON(object: any): TableSchema { return { - tableIdentifier: isSet(object.tableIdentifier) ? String(object.tableIdentifier) : "", + tableIdentifier: isSet(object.tableIdentifier) + ? String(object.tableIdentifier) + : '', columns: isObject(object.columns) - ? Object.entries(object.columns).reduce<{ [key: string]: string }>((acc, [key, value]) => { - acc[key] = String(value); - return acc; - }, {}) + ? Object.entries(object.columns).reduce<{ [key: string]: string }>( + (acc, [key, value]) => { + acc[key] = String(value); + return acc; + }, + {} + ) : {}, - primaryKeyColumn: isSet(object.primaryKeyColumn) ? String(object.primaryKeyColumn) : "", + primaryKeyColumn: isSet(object.primaryKeyColumn) + ? String(object.primaryKeyColumn) + : '', }; }, toJSON(message: TableSchema): unknown { const obj: any = {}; - if (message.tableIdentifier !== "") { + if (message.tableIdentifier !== '') { obj.tableIdentifier = message.tableIdentifier; } if (message.columns) { @@ -2760,7 +3269,7 @@ export const TableSchema = { }); } } - if (message.primaryKeyColumn !== "") { + if (message.primaryKeyColumn !== '') { obj.primaryKeyColumn = message.primaryKeyColumn; } return obj; @@ -2769,37 +3278,48 @@ export const TableSchema = { create, I>>(base?: I): TableSchema { return TableSchema.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): TableSchema { + fromPartial, I>>( + object: I + ): TableSchema { const message = createBaseTableSchema(); - message.tableIdentifier = object.tableIdentifier ?? ""; - message.columns = Object.entries(object.columns ?? {}).reduce<{ [key: string]: string }>((acc, [key, value]) => { + message.tableIdentifier = object.tableIdentifier ?? ''; + message.columns = Object.entries(object.columns ?? 
{}).reduce<{ + [key: string]: string; + }>((acc, [key, value]) => { if (value !== undefined) { acc[key] = String(value); } return acc; }, {}); - message.primaryKeyColumn = object.primaryKeyColumn ?? ""; + message.primaryKeyColumn = object.primaryKeyColumn ?? ''; return message; }, }; function createBaseTableSchema_ColumnsEntry(): TableSchema_ColumnsEntry { - return { key: "", value: "" }; + return { key: '', value: '' }; } export const TableSchema_ColumnsEntry = { - encode(message: TableSchema_ColumnsEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.key !== "") { + encode( + message: TableSchema_ColumnsEntry, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { + if (message.key !== '') { writer.uint32(10).string(message.key); } - if (message.value !== "") { + if (message.value !== '') { writer.uint32(18).string(message.value); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): TableSchema_ColumnsEntry { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): TableSchema_ColumnsEntry { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseTableSchema_ColumnsEntry(); while (reader.pos < end) { @@ -2829,27 +3349,34 @@ export const TableSchema_ColumnsEntry = { }, fromJSON(object: any): TableSchema_ColumnsEntry { - return { key: isSet(object.key) ? String(object.key) : "", value: isSet(object.value) ? String(object.value) : "" }; + return { + key: isSet(object.key) ? String(object.key) : '', + value: isSet(object.value) ? String(object.value) : '', + }; }, toJSON(message: TableSchema_ColumnsEntry): unknown { const obj: any = {}; - if (message.key !== "") { + if (message.key !== '') { obj.key = message.key; } - if (message.value !== "") { + if (message.value !== '') { obj.value = message.value; } return obj; }, - create, I>>(base?: I): TableSchema_ColumnsEntry { + create, I>>( + base?: I + ): TableSchema_ColumnsEntry { return TableSchema_ColumnsEntry.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): TableSchema_ColumnsEntry { + fromPartial, I>>( + object: I + ): TableSchema_ColumnsEntry { const message = createBaseTableSchema_ColumnsEntry(); - message.key = object.key ?? ""; - message.value = object.value ?? ""; + message.key = object.key ?? ''; + message.value = object.value ?? ''; return message; }, }; @@ -2859,9 +3386,15 @@ function createBaseGetTableSchemaBatchInput(): GetTableSchemaBatchInput { } export const GetTableSchemaBatchInput = { - encode(message: GetTableSchemaBatchInput, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: GetTableSchemaBatchInput, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.peerConnectionConfig !== undefined) { - Peer.encode(message.peerConnectionConfig, writer.uint32(10).fork()).ldelim(); + Peer.encode( + message.peerConnectionConfig, + writer.uint32(10).fork() + ).ldelim(); } for (const v of message.tableIdentifiers) { writer.uint32(18).string(v!); @@ -2869,8 +3402,12 @@ export const GetTableSchemaBatchInput = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): GetTableSchemaBatchInput { - const reader = input instanceof _m0.Reader ? 
input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): GetTableSchemaBatchInput { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseGetTableSchemaBatchInput(); while (reader.pos < end) { @@ -2901,7 +3438,9 @@ export const GetTableSchemaBatchInput = { fromJSON(object: any): GetTableSchemaBatchInput { return { - peerConnectionConfig: isSet(object.peerConnectionConfig) ? Peer.fromJSON(object.peerConnectionConfig) : undefined, + peerConnectionConfig: isSet(object.peerConnectionConfig) + ? Peer.fromJSON(object.peerConnectionConfig) + : undefined, tableIdentifiers: Array.isArray(object?.tableIdentifiers) ? object.tableIdentifiers.map((e: any) => String(e)) : [], @@ -2919,14 +3458,20 @@ export const GetTableSchemaBatchInput = { return obj; }, - create, I>>(base?: I): GetTableSchemaBatchInput { + create, I>>( + base?: I + ): GetTableSchemaBatchInput { return GetTableSchemaBatchInput.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): GetTableSchemaBatchInput { + fromPartial, I>>( + object: I + ): GetTableSchemaBatchInput { const message = createBaseGetTableSchemaBatchInput(); - message.peerConnectionConfig = (object.peerConnectionConfig !== undefined && object.peerConnectionConfig !== null) - ? Peer.fromPartial(object.peerConnectionConfig) - : undefined; + message.peerConnectionConfig = + object.peerConnectionConfig !== undefined && + object.peerConnectionConfig !== null + ? Peer.fromPartial(object.peerConnectionConfig) + : undefined; message.tableIdentifiers = object.tableIdentifiers?.map((e) => e) || []; return message; }, @@ -2937,16 +3482,25 @@ function createBaseGetTableSchemaBatchOutput(): GetTableSchemaBatchOutput { } export const GetTableSchemaBatchOutput = { - encode(message: GetTableSchemaBatchOutput, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: GetTableSchemaBatchOutput, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { Object.entries(message.tableNameSchemaMapping).forEach(([key, value]) => { - GetTableSchemaBatchOutput_TableNameSchemaMappingEntry.encode({ key: key as any, value }, writer.uint32(10).fork()) - .ldelim(); + GetTableSchemaBatchOutput_TableNameSchemaMappingEntry.encode( + { key: key as any, value }, + writer.uint32(10).fork() + ).ldelim(); }); return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): GetTableSchemaBatchOutput { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): GetTableSchemaBatchOutput { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseGetTableSchemaBatchOutput(); while (reader.pos < end) { @@ -2957,7 +3511,11 @@ export const GetTableSchemaBatchOutput = { break; } - const entry1 = GetTableSchemaBatchOutput_TableNameSchemaMappingEntry.decode(reader, reader.uint32()); + const entry1 = + GetTableSchemaBatchOutput_TableNameSchemaMappingEntry.decode( + reader, + reader.uint32() + ); if (entry1.value !== undefined) { message.tableNameSchemaMapping[entry1.key] = entry1.value; } @@ -2974,10 +3532,12 @@ export const GetTableSchemaBatchOutput = { fromJSON(object: any): GetTableSchemaBatchOutput { return { tableNameSchemaMapping: isObject(object.tableNameSchemaMapping) - ? 
Object.entries(object.tableNameSchemaMapping).reduce<{ [key: string]: TableSchema }>((acc, [key, value]) => { - acc[key] = TableSchema.fromJSON(value); - return acc; - }, {}) + ? Object.entries(object.tableNameSchemaMapping).reduce<{ + [key: string]: TableSchema; + }>((acc, [key, value]) => { + acc[key] = TableSchema.fromJSON(value); + return acc; + }, {}) : {}, }; }, @@ -2996,14 +3556,18 @@ export const GetTableSchemaBatchOutput = { return obj; }, - create, I>>(base?: I): GetTableSchemaBatchOutput { + create, I>>( + base?: I + ): GetTableSchemaBatchOutput { return GetTableSchemaBatchOutput.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): GetTableSchemaBatchOutput { + fromPartial, I>>( + object: I + ): GetTableSchemaBatchOutput { const message = createBaseGetTableSchemaBatchOutput(); - message.tableNameSchemaMapping = Object.entries(object.tableNameSchemaMapping ?? {}).reduce< - { [key: string]: TableSchema } - >((acc, [key, value]) => { + message.tableNameSchemaMapping = Object.entries( + object.tableNameSchemaMapping ?? {} + ).reduce<{ [key: string]: TableSchema }>((acc, [key, value]) => { if (value !== undefined) { acc[key] = TableSchema.fromPartial(value); } @@ -3014,15 +3578,15 @@ export const GetTableSchemaBatchOutput = { }; function createBaseGetTableSchemaBatchOutput_TableNameSchemaMappingEntry(): GetTableSchemaBatchOutput_TableNameSchemaMappingEntry { - return { key: "", value: undefined }; + return { key: '', value: undefined }; } export const GetTableSchemaBatchOutput_TableNameSchemaMappingEntry = { encode( message: GetTableSchemaBatchOutput_TableNameSchemaMappingEntry, - writer: _m0.Writer = _m0.Writer.create(), + writer: _m0.Writer = _m0.Writer.create() ): _m0.Writer { - if (message.key !== "") { + if (message.key !== '') { writer.uint32(10).string(message.key); } if (message.value !== undefined) { @@ -3031,10 +3595,15 @@ export const GetTableSchemaBatchOutput_TableNameSchemaMappingEntry = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): GetTableSchemaBatchOutput_TableNameSchemaMappingEntry { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): GetTableSchemaBatchOutput_TableNameSchemaMappingEntry { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseGetTableSchemaBatchOutput_TableNameSchemaMappingEntry(); + const message = + createBaseGetTableSchemaBatchOutput_TableNameSchemaMappingEntry(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { @@ -3063,14 +3632,18 @@ export const GetTableSchemaBatchOutput_TableNameSchemaMappingEntry = { fromJSON(object: any): GetTableSchemaBatchOutput_TableNameSchemaMappingEntry { return { - key: isSet(object.key) ? String(object.key) : "", - value: isSet(object.value) ? TableSchema.fromJSON(object.value) : undefined, + key: isSet(object.key) ? String(object.key) : '', + value: isSet(object.value) + ? 
TableSchema.fromJSON(object.value) + : undefined, }; }, - toJSON(message: GetTableSchemaBatchOutput_TableNameSchemaMappingEntry): unknown { + toJSON( + message: GetTableSchemaBatchOutput_TableNameSchemaMappingEntry + ): unknown { const obj: any = {}; - if (message.key !== "") { + if (message.key !== '') { obj.key = message.key; } if (message.value !== undefined) { @@ -3079,43 +3652,70 @@ export const GetTableSchemaBatchOutput_TableNameSchemaMappingEntry = { return obj; }, - create, I>>( - base?: I, - ): GetTableSchemaBatchOutput_TableNameSchemaMappingEntry { - return GetTableSchemaBatchOutput_TableNameSchemaMappingEntry.fromPartial(base ?? ({} as any)); + create< + I extends Exact< + DeepPartial, + I + >, + >(base?: I): GetTableSchemaBatchOutput_TableNameSchemaMappingEntry { + return GetTableSchemaBatchOutput_TableNameSchemaMappingEntry.fromPartial( + base ?? ({} as any) + ); }, - fromPartial, I>>( - object: I, - ): GetTableSchemaBatchOutput_TableNameSchemaMappingEntry { - const message = createBaseGetTableSchemaBatchOutput_TableNameSchemaMappingEntry(); - message.key = object.key ?? ""; - message.value = (object.value !== undefined && object.value !== null) - ? TableSchema.fromPartial(object.value) - : undefined; + fromPartial< + I extends Exact< + DeepPartial, + I + >, + >(object: I): GetTableSchemaBatchOutput_TableNameSchemaMappingEntry { + const message = + createBaseGetTableSchemaBatchOutput_TableNameSchemaMappingEntry(); + message.key = object.key ?? ''; + message.value = + object.value !== undefined && object.value !== null + ? TableSchema.fromPartial(object.value) + : undefined; return message; }, }; function createBaseSetupNormalizedTableInput(): SetupNormalizedTableInput { - return { peerConnectionConfig: undefined, tableIdentifier: "", sourceTableSchema: undefined }; + return { + peerConnectionConfig: undefined, + tableIdentifier: '', + sourceTableSchema: undefined, + }; } export const SetupNormalizedTableInput = { - encode(message: SetupNormalizedTableInput, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: SetupNormalizedTableInput, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.peerConnectionConfig !== undefined) { - Peer.encode(message.peerConnectionConfig, writer.uint32(10).fork()).ldelim(); + Peer.encode( + message.peerConnectionConfig, + writer.uint32(10).fork() + ).ldelim(); } - if (message.tableIdentifier !== "") { + if (message.tableIdentifier !== '') { writer.uint32(18).string(message.tableIdentifier); } if (message.sourceTableSchema !== undefined) { - TableSchema.encode(message.sourceTableSchema, writer.uint32(26).fork()).ldelim(); + TableSchema.encode( + message.sourceTableSchema, + writer.uint32(26).fork() + ).ldelim(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SetupNormalizedTableInput { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): SetupNormalizedTableInput { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseSetupNormalizedTableInput(); while (reader.pos < end) { @@ -3140,7 +3740,10 @@ export const SetupNormalizedTableInput = { break; } - message.sourceTableSchema = TableSchema.decode(reader, reader.uint32()); + message.sourceTableSchema = TableSchema.decode( + reader, + reader.uint32() + ); continue; } if ((tag & 7) === 4 || tag === 0) { @@ -3153,9 +3756,15 @@ export const SetupNormalizedTableInput = { fromJSON(object: any): SetupNormalizedTableInput { return { - peerConnectionConfig: isSet(object.peerConnectionConfig) ? Peer.fromJSON(object.peerConnectionConfig) : undefined, - tableIdentifier: isSet(object.tableIdentifier) ? String(object.tableIdentifier) : "", - sourceTableSchema: isSet(object.sourceTableSchema) ? TableSchema.fromJSON(object.sourceTableSchema) : undefined, + peerConnectionConfig: isSet(object.peerConnectionConfig) + ? Peer.fromJSON(object.peerConnectionConfig) + : undefined, + tableIdentifier: isSet(object.tableIdentifier) + ? String(object.tableIdentifier) + : '', + sourceTableSchema: isSet(object.sourceTableSchema) + ? TableSchema.fromJSON(object.sourceTableSchema) + : undefined, }; }, @@ -3164,7 +3773,7 @@ export const SetupNormalizedTableInput = { if (message.peerConnectionConfig !== undefined) { obj.peerConnectionConfig = Peer.toJSON(message.peerConnectionConfig); } - if (message.tableIdentifier !== "") { + if (message.tableIdentifier !== '') { obj.tableIdentifier = message.tableIdentifier; } if (message.sourceTableSchema !== undefined) { @@ -3173,18 +3782,26 @@ export const SetupNormalizedTableInput = { return obj; }, - create, I>>(base?: I): SetupNormalizedTableInput { + create, I>>( + base?: I + ): SetupNormalizedTableInput { return SetupNormalizedTableInput.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): SetupNormalizedTableInput { + fromPartial, I>>( + object: I + ): SetupNormalizedTableInput { const message = createBaseSetupNormalizedTableInput(); - message.peerConnectionConfig = (object.peerConnectionConfig !== undefined && object.peerConnectionConfig !== null) - ? Peer.fromPartial(object.peerConnectionConfig) - : undefined; - message.tableIdentifier = object.tableIdentifier ?? ""; - message.sourceTableSchema = (object.sourceTableSchema !== undefined && object.sourceTableSchema !== null) - ? TableSchema.fromPartial(object.sourceTableSchema) - : undefined; + message.peerConnectionConfig = + object.peerConnectionConfig !== undefined && + object.peerConnectionConfig !== null + ? Peer.fromPartial(object.peerConnectionConfig) + : undefined; + message.tableIdentifier = object.tableIdentifier ?? ''; + message.sourceTableSchema = + object.sourceTableSchema !== undefined && + object.sourceTableSchema !== null + ? 
TableSchema.fromPartial(object.sourceTableSchema) + : undefined; return message; }, }; @@ -3194,21 +3811,31 @@ function createBaseSetupNormalizedTableBatchInput(): SetupNormalizedTableBatchIn } export const SetupNormalizedTableBatchInput = { - encode(message: SetupNormalizedTableBatchInput, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: SetupNormalizedTableBatchInput, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.peerConnectionConfig !== undefined) { - Peer.encode(message.peerConnectionConfig, writer.uint32(10).fork()).ldelim(); + Peer.encode( + message.peerConnectionConfig, + writer.uint32(10).fork() + ).ldelim(); } Object.entries(message.tableNameSchemaMapping).forEach(([key, value]) => { SetupNormalizedTableBatchInput_TableNameSchemaMappingEntry.encode( { key: key as any, value }, - writer.uint32(18).fork(), + writer.uint32(18).fork() ).ldelim(); }); return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SetupNormalizedTableBatchInput { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): SetupNormalizedTableBatchInput { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseSetupNormalizedTableBatchInput(); while (reader.pos < end) { @@ -3226,7 +3853,11 @@ export const SetupNormalizedTableBatchInput = { break; } - const entry2 = SetupNormalizedTableBatchInput_TableNameSchemaMappingEntry.decode(reader, reader.uint32()); + const entry2 = + SetupNormalizedTableBatchInput_TableNameSchemaMappingEntry.decode( + reader, + reader.uint32() + ); if (entry2.value !== undefined) { message.tableNameSchemaMapping[entry2.key] = entry2.value; } @@ -3242,12 +3873,16 @@ export const SetupNormalizedTableBatchInput = { fromJSON(object: any): SetupNormalizedTableBatchInput { return { - peerConnectionConfig: isSet(object.peerConnectionConfig) ? Peer.fromJSON(object.peerConnectionConfig) : undefined, + peerConnectionConfig: isSet(object.peerConnectionConfig) + ? Peer.fromJSON(object.peerConnectionConfig) + : undefined, tableNameSchemaMapping: isObject(object.tableNameSchemaMapping) - ? Object.entries(object.tableNameSchemaMapping).reduce<{ [key: string]: TableSchema }>((acc, [key, value]) => { - acc[key] = TableSchema.fromJSON(value); - return acc; - }, {}) + ? Object.entries(object.tableNameSchemaMapping).reduce<{ + [key: string]: TableSchema; + }>((acc, [key, value]) => { + acc[key] = TableSchema.fromJSON(value); + return acc; + }, {}) : {}, }; }, @@ -3269,19 +3904,23 @@ export const SetupNormalizedTableBatchInput = { return obj; }, - create, I>>(base?: I): SetupNormalizedTableBatchInput { + create, I>>( + base?: I + ): SetupNormalizedTableBatchInput { return SetupNormalizedTableBatchInput.fromPartial(base ?? ({} as any)); }, fromPartial, I>>( - object: I, + object: I ): SetupNormalizedTableBatchInput { const message = createBaseSetupNormalizedTableBatchInput(); - message.peerConnectionConfig = (object.peerConnectionConfig !== undefined && object.peerConnectionConfig !== null) - ? Peer.fromPartial(object.peerConnectionConfig) - : undefined; - message.tableNameSchemaMapping = Object.entries(object.tableNameSchemaMapping ?? {}).reduce< - { [key: string]: TableSchema } - >((acc, [key, value]) => { + message.peerConnectionConfig = + object.peerConnectionConfig !== undefined && + object.peerConnectionConfig !== null + ? 
Peer.fromPartial(object.peerConnectionConfig) + : undefined; + message.tableNameSchemaMapping = Object.entries( + object.tableNameSchemaMapping ?? {} + ).reduce<{ [key: string]: TableSchema }>((acc, [key, value]) => { if (value !== undefined) { acc[key] = TableSchema.fromPartial(value); } @@ -3292,15 +3931,15 @@ export const SetupNormalizedTableBatchInput = { }; function createBaseSetupNormalizedTableBatchInput_TableNameSchemaMappingEntry(): SetupNormalizedTableBatchInput_TableNameSchemaMappingEntry { - return { key: "", value: undefined }; + return { key: '', value: undefined }; } export const SetupNormalizedTableBatchInput_TableNameSchemaMappingEntry = { encode( message: SetupNormalizedTableBatchInput_TableNameSchemaMappingEntry, - writer: _m0.Writer = _m0.Writer.create(), + writer: _m0.Writer = _m0.Writer.create() ): _m0.Writer { - if (message.key !== "") { + if (message.key !== '') { writer.uint32(10).string(message.key); } if (message.value !== undefined) { @@ -3309,10 +3948,15 @@ export const SetupNormalizedTableBatchInput_TableNameSchemaMappingEntry = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SetupNormalizedTableBatchInput_TableNameSchemaMappingEntry { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): SetupNormalizedTableBatchInput_TableNameSchemaMappingEntry { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseSetupNormalizedTableBatchInput_TableNameSchemaMappingEntry(); + const message = + createBaseSetupNormalizedTableBatchInput_TableNameSchemaMappingEntry(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { @@ -3339,16 +3983,22 @@ export const SetupNormalizedTableBatchInput_TableNameSchemaMappingEntry = { return message; }, - fromJSON(object: any): SetupNormalizedTableBatchInput_TableNameSchemaMappingEntry { + fromJSON( + object: any + ): SetupNormalizedTableBatchInput_TableNameSchemaMappingEntry { return { - key: isSet(object.key) ? String(object.key) : "", - value: isSet(object.value) ? TableSchema.fromJSON(object.value) : undefined, + key: isSet(object.key) ? String(object.key) : '', + value: isSet(object.value) + ? TableSchema.fromJSON(object.value) + : undefined, }; }, - toJSON(message: SetupNormalizedTableBatchInput_TableNameSchemaMappingEntry): unknown { + toJSON( + message: SetupNormalizedTableBatchInput_TableNameSchemaMappingEntry + ): unknown { const obj: any = {}; - if (message.key !== "") { + if (message.key !== '') { obj.key = message.key; } if (message.value !== undefined) { @@ -3357,30 +4007,43 @@ export const SetupNormalizedTableBatchInput_TableNameSchemaMappingEntry = { return obj; }, - create, I>>( - base?: I, - ): SetupNormalizedTableBatchInput_TableNameSchemaMappingEntry { - return SetupNormalizedTableBatchInput_TableNameSchemaMappingEntry.fromPartial(base ?? ({} as any)); + create< + I extends Exact< + DeepPartial, + I + >, + >(base?: I): SetupNormalizedTableBatchInput_TableNameSchemaMappingEntry { + return SetupNormalizedTableBatchInput_TableNameSchemaMappingEntry.fromPartial( + base ?? ({} as any) + ); }, - fromPartial, I>>( - object: I, - ): SetupNormalizedTableBatchInput_TableNameSchemaMappingEntry { - const message = createBaseSetupNormalizedTableBatchInput_TableNameSchemaMappingEntry(); - message.key = object.key ?? 
""; - message.value = (object.value !== undefined && object.value !== null) - ? TableSchema.fromPartial(object.value) - : undefined; + fromPartial< + I extends Exact< + DeepPartial, + I + >, + >(object: I): SetupNormalizedTableBatchInput_TableNameSchemaMappingEntry { + const message = + createBaseSetupNormalizedTableBatchInput_TableNameSchemaMappingEntry(); + message.key = object.key ?? ''; + message.value = + object.value !== undefined && object.value !== null + ? TableSchema.fromPartial(object.value) + : undefined; return message; }, }; function createBaseSetupNormalizedTableOutput(): SetupNormalizedTableOutput { - return { tableIdentifier: "", alreadyExists: false }; + return { tableIdentifier: '', alreadyExists: false }; } export const SetupNormalizedTableOutput = { - encode(message: SetupNormalizedTableOutput, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.tableIdentifier !== "") { + encode( + message: SetupNormalizedTableOutput, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { + if (message.tableIdentifier !== '') { writer.uint32(10).string(message.tableIdentifier); } if (message.alreadyExists === true) { @@ -3389,8 +4052,12 @@ export const SetupNormalizedTableOutput = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SetupNormalizedTableOutput { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): SetupNormalizedTableOutput { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseSetupNormalizedTableOutput(); while (reader.pos < end) { @@ -3421,14 +4088,18 @@ export const SetupNormalizedTableOutput = { fromJSON(object: any): SetupNormalizedTableOutput { return { - tableIdentifier: isSet(object.tableIdentifier) ? String(object.tableIdentifier) : "", - alreadyExists: isSet(object.alreadyExists) ? Boolean(object.alreadyExists) : false, + tableIdentifier: isSet(object.tableIdentifier) + ? String(object.tableIdentifier) + : '', + alreadyExists: isSet(object.alreadyExists) + ? Boolean(object.alreadyExists) + : false, }; }, toJSON(message: SetupNormalizedTableOutput): unknown { const obj: any = {}; - if (message.tableIdentifier !== "") { + if (message.tableIdentifier !== '') { obj.tableIdentifier = message.tableIdentifier; } if (message.alreadyExists === true) { @@ -3437,12 +4108,16 @@ export const SetupNormalizedTableOutput = { return obj; }, - create, I>>(base?: I): SetupNormalizedTableOutput { + create, I>>( + base?: I + ): SetupNormalizedTableOutput { return SetupNormalizedTableOutput.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): SetupNormalizedTableOutput { + fromPartial, I>>( + object: I + ): SetupNormalizedTableOutput { const message = createBaseSetupNormalizedTableOutput(); - message.tableIdentifier = object.tableIdentifier ?? ""; + message.tableIdentifier = object.tableIdentifier ?? ''; message.alreadyExists = object.alreadyExists ?? 
false; return message; }, @@ -3453,18 +4128,25 @@ function createBaseSetupNormalizedTableBatchOutput(): SetupNormalizedTableBatchO } export const SetupNormalizedTableBatchOutput = { - encode(message: SetupNormalizedTableBatchOutput, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: SetupNormalizedTableBatchOutput, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { Object.entries(message.tableExistsMapping).forEach(([key, value]) => { SetupNormalizedTableBatchOutput_TableExistsMappingEntry.encode( { key: key as any, value }, - writer.uint32(10).fork(), + writer.uint32(10).fork() ).ldelim(); }); return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SetupNormalizedTableBatchOutput { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): SetupNormalizedTableBatchOutput { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseSetupNormalizedTableBatchOutput(); while (reader.pos < end) { @@ -3475,7 +4157,11 @@ export const SetupNormalizedTableBatchOutput = { break; } - const entry1 = SetupNormalizedTableBatchOutput_TableExistsMappingEntry.decode(reader, reader.uint32()); + const entry1 = + SetupNormalizedTableBatchOutput_TableExistsMappingEntry.decode( + reader, + reader.uint32() + ); if (entry1.value !== undefined) { message.tableExistsMapping[entry1.key] = entry1.value; } @@ -3492,10 +4178,12 @@ export const SetupNormalizedTableBatchOutput = { fromJSON(object: any): SetupNormalizedTableBatchOutput { return { tableExistsMapping: isObject(object.tableExistsMapping) - ? Object.entries(object.tableExistsMapping).reduce<{ [key: string]: boolean }>((acc, [key, value]) => { - acc[key] = Boolean(value); - return acc; - }, {}) + ? Object.entries(object.tableExistsMapping).reduce<{ + [key: string]: boolean; + }>((acc, [key, value]) => { + acc[key] = Boolean(value); + return acc; + }, {}) : {}, }; }, @@ -3514,36 +4202,37 @@ export const SetupNormalizedTableBatchOutput = { return obj; }, - create, I>>(base?: I): SetupNormalizedTableBatchOutput { + create, I>>( + base?: I + ): SetupNormalizedTableBatchOutput { return SetupNormalizedTableBatchOutput.fromPartial(base ?? ({} as any)); }, fromPartial, I>>( - object: I, + object: I ): SetupNormalizedTableBatchOutput { const message = createBaseSetupNormalizedTableBatchOutput(); - message.tableExistsMapping = Object.entries(object.tableExistsMapping ?? {}).reduce<{ [key: string]: boolean }>( - (acc, [key, value]) => { - if (value !== undefined) { - acc[key] = Boolean(value); - } - return acc; - }, - {}, - ); + message.tableExistsMapping = Object.entries( + object.tableExistsMapping ?? 
{} + ).reduce<{ [key: string]: boolean }>((acc, [key, value]) => { + if (value !== undefined) { + acc[key] = Boolean(value); + } + return acc; + }, {}); return message; }, }; function createBaseSetupNormalizedTableBatchOutput_TableExistsMappingEntry(): SetupNormalizedTableBatchOutput_TableExistsMappingEntry { - return { key: "", value: false }; + return { key: '', value: false }; } export const SetupNormalizedTableBatchOutput_TableExistsMappingEntry = { encode( message: SetupNormalizedTableBatchOutput_TableExistsMappingEntry, - writer: _m0.Writer = _m0.Writer.create(), + writer: _m0.Writer = _m0.Writer.create() ): _m0.Writer { - if (message.key !== "") { + if (message.key !== '') { writer.uint32(10).string(message.key); } if (message.value === true) { @@ -3552,10 +4241,15 @@ export const SetupNormalizedTableBatchOutput_TableExistsMappingEntry = { return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): SetupNormalizedTableBatchOutput_TableExistsMappingEntry { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): SetupNormalizedTableBatchOutput_TableExistsMappingEntry { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseSetupNormalizedTableBatchOutput_TableExistsMappingEntry(); + const message = + createBaseSetupNormalizedTableBatchOutput_TableExistsMappingEntry(); while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { @@ -3582,16 +4276,20 @@ export const SetupNormalizedTableBatchOutput_TableExistsMappingEntry = { return message; }, - fromJSON(object: any): SetupNormalizedTableBatchOutput_TableExistsMappingEntry { + fromJSON( + object: any + ): SetupNormalizedTableBatchOutput_TableExistsMappingEntry { return { - key: isSet(object.key) ? String(object.key) : "", + key: isSet(object.key) ? String(object.key) : '', value: isSet(object.value) ? Boolean(object.value) : false, }; }, - toJSON(message: SetupNormalizedTableBatchOutput_TableExistsMappingEntry): unknown { + toJSON( + message: SetupNormalizedTableBatchOutput_TableExistsMappingEntry + ): unknown { const obj: any = {}; - if (message.key !== "") { + if (message.key !== '') { obj.key = message.key; } if (message.value === true) { @@ -3600,16 +4298,25 @@ export const SetupNormalizedTableBatchOutput_TableExistsMappingEntry = { return obj; }, - create, I>>( - base?: I, - ): SetupNormalizedTableBatchOutput_TableExistsMappingEntry { - return SetupNormalizedTableBatchOutput_TableExistsMappingEntry.fromPartial(base ?? ({} as any)); + create< + I extends Exact< + DeepPartial, + I + >, + >(base?: I): SetupNormalizedTableBatchOutput_TableExistsMappingEntry { + return SetupNormalizedTableBatchOutput_TableExistsMappingEntry.fromPartial( + base ?? ({} as any) + ); }, - fromPartial, I>>( - object: I, - ): SetupNormalizedTableBatchOutput_TableExistsMappingEntry { - const message = createBaseSetupNormalizedTableBatchOutput_TableExistsMappingEntry(); - message.key = object.key ?? ""; + fromPartial< + I extends Exact< + DeepPartial, + I + >, + >(object: I): SetupNormalizedTableBatchOutput_TableExistsMappingEntry { + const message = + createBaseSetupNormalizedTableBatchOutput_TableExistsMappingEntry(); + message.key = object.key ?? ''; message.value = object.value ?? 
false; return message; }, @@ -3620,7 +4327,10 @@ function createBaseIntPartitionRange(): IntPartitionRange { } export const IntPartitionRange = { - encode(message: IntPartitionRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: IntPartitionRange, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.start !== 0) { writer.uint32(8).int64(message.start); } @@ -3631,7 +4341,8 @@ export const IntPartitionRange = { }, decode(input: _m0.Reader | Uint8Array, length?: number): IntPartitionRange { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseIntPartitionRange(); while (reader.pos < end) { @@ -3661,7 +4372,10 @@ export const IntPartitionRange = { }, fromJSON(object: any): IntPartitionRange { - return { start: isSet(object.start) ? Number(object.start) : 0, end: isSet(object.end) ? Number(object.end) : 0 }; + return { + start: isSet(object.start) ? Number(object.start) : 0, + end: isSet(object.end) ? Number(object.end) : 0, + }; }, toJSON(message: IntPartitionRange): unknown { @@ -3675,10 +4389,14 @@ export const IntPartitionRange = { return obj; }, - create, I>>(base?: I): IntPartitionRange { + create, I>>( + base?: I + ): IntPartitionRange { return IntPartitionRange.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): IntPartitionRange { + fromPartial, I>>( + object: I + ): IntPartitionRange { const message = createBaseIntPartitionRange(); message.start = object.start ?? 0; message.end = object.end ?? 0; @@ -3691,18 +4409,31 @@ function createBaseTimestampPartitionRange(): TimestampPartitionRange { } export const TimestampPartitionRange = { - encode(message: TimestampPartitionRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: TimestampPartitionRange, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.start !== undefined) { - Timestamp.encode(toTimestamp(message.start), writer.uint32(10).fork()).ldelim(); + Timestamp.encode( + toTimestamp(message.start), + writer.uint32(10).fork() + ).ldelim(); } if (message.end !== undefined) { - Timestamp.encode(toTimestamp(message.end), writer.uint32(18).fork()).ldelim(); + Timestamp.encode( + toTimestamp(message.end), + writer.uint32(18).fork() + ).ldelim(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): TimestampPartitionRange { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): TimestampPartitionRange { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseTimestampPartitionRange(); while (reader.pos < end) { @@ -3713,14 +4444,18 @@ export const TimestampPartitionRange = { break; } - message.start = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + message.start = fromTimestamp( + Timestamp.decode(reader, reader.uint32()) + ); continue; case 2: if (tag !== 18) { break; } - message.end = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + message.end = fromTimestamp( + Timestamp.decode(reader, reader.uint32()) + ); continue; } if ((tag & 7) === 4 || tag === 0) { @@ -3749,10 +4484,14 @@ export const TimestampPartitionRange = { return obj; }, - create, I>>(base?: I): TimestampPartitionRange { + create, I>>( + base?: I + ): TimestampPartitionRange { return TimestampPartitionRange.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): TimestampPartitionRange { + fromPartial, I>>( + object: I + ): TimestampPartitionRange { const message = createBaseTimestampPartitionRange(); message.start = object.start ?? undefined; message.end = object.end ?? undefined; @@ -3776,7 +4515,8 @@ export const TID = { }, decode(input: _m0.Reader | Uint8Array, length?: number): TID { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseTID(); while (reader.pos < end) { @@ -3808,7 +4548,9 @@ export const TID = { fromJSON(object: any): TID { return { blockNumber: isSet(object.blockNumber) ? Number(object.blockNumber) : 0, - offsetNumber: isSet(object.offsetNumber) ? Number(object.offsetNumber) : 0, + offsetNumber: isSet(object.offsetNumber) + ? Number(object.offsetNumber) + : 0, }; }, @@ -3839,7 +4581,10 @@ function createBaseTIDPartitionRange(): TIDPartitionRange { } export const TIDPartitionRange = { - encode(message: TIDPartitionRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: TIDPartitionRange, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.start !== undefined) { TID.encode(message.start, writer.uint32(10).fork()).ldelim(); } @@ -3850,7 +4595,8 @@ export const TIDPartitionRange = { }, decode(input: _m0.Reader | Uint8Array, length?: number): TIDPartitionRange { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseTIDPartitionRange(); while (reader.pos < end) { @@ -3897,37 +4643,64 @@ export const TIDPartitionRange = { return obj; }, - create, I>>(base?: I): TIDPartitionRange { + create, I>>( + base?: I + ): TIDPartitionRange { return TIDPartitionRange.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): TIDPartitionRange { + fromPartial, I>>( + object: I + ): TIDPartitionRange { const message = createBaseTIDPartitionRange(); - message.start = (object.start !== undefined && object.start !== null) ? TID.fromPartial(object.start) : undefined; - message.end = (object.end !== undefined && object.end !== null) ? TID.fromPartial(object.end) : undefined; + message.start = + object.start !== undefined && object.start !== null + ? TID.fromPartial(object.start) + : undefined; + message.end = + object.end !== undefined && object.end !== null + ? 
TID.fromPartial(object.end) + : undefined; return message; }, }; function createBasePartitionRange(): PartitionRange { - return { intRange: undefined, timestampRange: undefined, tidRange: undefined }; + return { + intRange: undefined, + timestampRange: undefined, + tidRange: undefined, + }; } export const PartitionRange = { - encode(message: PartitionRange, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: PartitionRange, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.intRange !== undefined) { - IntPartitionRange.encode(message.intRange, writer.uint32(10).fork()).ldelim(); + IntPartitionRange.encode( + message.intRange, + writer.uint32(10).fork() + ).ldelim(); } if (message.timestampRange !== undefined) { - TimestampPartitionRange.encode(message.timestampRange, writer.uint32(18).fork()).ldelim(); + TimestampPartitionRange.encode( + message.timestampRange, + writer.uint32(18).fork() + ).ldelim(); } if (message.tidRange !== undefined) { - TIDPartitionRange.encode(message.tidRange, writer.uint32(26).fork()).ldelim(); + TIDPartitionRange.encode( + message.tidRange, + writer.uint32(26).fork() + ).ldelim(); } return writer; }, decode(input: _m0.Reader | Uint8Array, length?: number): PartitionRange { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBasePartitionRange(); while (reader.pos < end) { @@ -3945,7 +4718,10 @@ export const PartitionRange = { break; } - message.timestampRange = TimestampPartitionRange.decode(reader, reader.uint32()); + message.timestampRange = TimestampPartitionRange.decode( + reader, + reader.uint32() + ); continue; case 3: if (tag !== 26) { @@ -3965,11 +4741,15 @@ export const PartitionRange = { fromJSON(object: any): PartitionRange { return { - intRange: isSet(object.intRange) ? IntPartitionRange.fromJSON(object.intRange) : undefined, + intRange: isSet(object.intRange) + ? IntPartitionRange.fromJSON(object.intRange) + : undefined, timestampRange: isSet(object.timestampRange) ? TimestampPartitionRange.fromJSON(object.timestampRange) : undefined, - tidRange: isSet(object.tidRange) ? TIDPartitionRange.fromJSON(object.tidRange) : undefined, + tidRange: isSet(object.tidRange) + ? TIDPartitionRange.fromJSON(object.tidRange) + : undefined, }; }, @@ -3979,7 +4759,9 @@ export const PartitionRange = { obj.intRange = IntPartitionRange.toJSON(message.intRange); } if (message.timestampRange !== undefined) { - obj.timestampRange = TimestampPartitionRange.toJSON(message.timestampRange); + obj.timestampRange = TimestampPartitionRange.toJSON( + message.timestampRange + ); } if (message.tidRange !== undefined) { obj.tidRange = TIDPartitionRange.toJSON(message.tidRange); @@ -3987,20 +4769,27 @@ export const PartitionRange = { return obj; }, - create, I>>(base?: I): PartitionRange { + create, I>>( + base?: I + ): PartitionRange { return PartitionRange.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): PartitionRange { + fromPartial, I>>( + object: I + ): PartitionRange { const message = createBasePartitionRange(); - message.intRange = (object.intRange !== undefined && object.intRange !== null) - ? IntPartitionRange.fromPartial(object.intRange) - : undefined; - message.timestampRange = (object.timestampRange !== undefined && object.timestampRange !== null) - ? 
TimestampPartitionRange.fromPartial(object.timestampRange) - : undefined; - message.tidRange = (object.tidRange !== undefined && object.tidRange !== null) - ? TIDPartitionRange.fromPartial(object.tidRange) - : undefined; + message.intRange = + object.intRange !== undefined && object.intRange !== null + ? IntPartitionRange.fromPartial(object.intRange) + : undefined; + message.timestampRange = + object.timestampRange !== undefined && object.timestampRange !== null + ? TimestampPartitionRange.fromPartial(object.timestampRange) + : undefined; + message.tidRange = + object.tidRange !== undefined && object.tidRange !== null + ? TIDPartitionRange.fromPartial(object.tidRange) + : undefined; return message; }, }; @@ -4010,7 +4799,10 @@ function createBaseQRepWriteMode(): QRepWriteMode { } export const QRepWriteMode = { - encode(message: QRepWriteMode, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: QRepWriteMode, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.writeType !== 0) { writer.uint32(8).int32(message.writeType); } @@ -4021,7 +4813,8 @@ export const QRepWriteMode = { }, decode(input: _m0.Reader | Uint8Array, length?: number): QRepWriteMode { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseQRepWriteMode(); while (reader.pos < end) { @@ -4052,7 +4845,9 @@ export const QRepWriteMode = { fromJSON(object: any): QRepWriteMode { return { - writeType: isSet(object.writeType) ? qRepWriteTypeFromJSON(object.writeType) : 0, + writeType: isSet(object.writeType) + ? qRepWriteTypeFromJSON(object.writeType) + : 0, upsertKeyColumns: Array.isArray(object?.upsertKeyColumns) ? object.upsertKeyColumns.map((e: any) => String(e)) : [], @@ -4070,10 +4865,14 @@ export const QRepWriteMode = { return obj; }, - create, I>>(base?: I): QRepWriteMode { + create, I>>( + base?: I + ): QRepWriteMode { return QRepWriteMode.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): QRepWriteMode { + fromPartial, I>>( + object: I + ): QRepWriteMode { const message = createBaseQRepWriteMode(); message.writeType = object.writeType ?? 
0; message.upsertKeyColumns = object.upsertKeyColumns?.map((e) => e) || []; @@ -4083,13 +4882,13 @@ export const QRepWriteMode = { function createBaseQRepConfig(): QRepConfig { return { - flowJobName: "", + flowJobName: '', sourcePeer: undefined, destinationPeer: undefined, - destinationTableIdentifier: "", - query: "", - watermarkTable: "", - watermarkColumn: "", + destinationTableIdentifier: '', + query: '', + watermarkTable: '', + watermarkColumn: '', initialCopyOnly: false, syncMode: 0, batchSizeInt: 0, @@ -4097,14 +4896,17 @@ function createBaseQRepConfig(): QRepConfig { maxParallelWorkers: 0, waitBetweenBatchesSeconds: 0, writeMode: undefined, - stagingPath: "", + stagingPath: '', numRowsPerPartition: 0, }; } export const QRepConfig = { - encode(message: QRepConfig, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.flowJobName !== "") { + encode( + message: QRepConfig, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { + if (message.flowJobName !== '') { writer.uint32(10).string(message.flowJobName); } if (message.sourcePeer !== undefined) { @@ -4113,16 +4915,16 @@ export const QRepConfig = { if (message.destinationPeer !== undefined) { Peer.encode(message.destinationPeer, writer.uint32(26).fork()).ldelim(); } - if (message.destinationTableIdentifier !== "") { + if (message.destinationTableIdentifier !== '') { writer.uint32(34).string(message.destinationTableIdentifier); } - if (message.query !== "") { + if (message.query !== '') { writer.uint32(42).string(message.query); } - if (message.watermarkTable !== "") { + if (message.watermarkTable !== '') { writer.uint32(50).string(message.watermarkTable); } - if (message.watermarkColumn !== "") { + if (message.watermarkColumn !== '') { writer.uint32(58).string(message.watermarkColumn); } if (message.initialCopyOnly === true) { @@ -4144,9 +4946,12 @@ export const QRepConfig = { writer.uint32(104).uint32(message.waitBetweenBatchesSeconds); } if (message.writeMode !== undefined) { - QRepWriteMode.encode(message.writeMode, writer.uint32(114).fork()).ldelim(); + QRepWriteMode.encode( + message.writeMode, + writer.uint32(114).fork() + ).ldelim(); } - if (message.stagingPath !== "") { + if (message.stagingPath !== '') { writer.uint32(122).string(message.stagingPath); } if (message.numRowsPerPartition !== 0) { @@ -4156,7 +4961,8 @@ export const QRepConfig = { }, decode(input: _m0.Reader | Uint8Array, length?: number): QRepConfig { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseQRepConfig(); while (reader.pos < end) { @@ -4285,30 +5091,54 @@ export const QRepConfig = { fromJSON(object: any): QRepConfig { return { - flowJobName: isSet(object.flowJobName) ? String(object.flowJobName) : "", - sourcePeer: isSet(object.sourcePeer) ? Peer.fromJSON(object.sourcePeer) : undefined, - destinationPeer: isSet(object.destinationPeer) ? Peer.fromJSON(object.destinationPeer) : undefined, + flowJobName: isSet(object.flowJobName) ? String(object.flowJobName) : '', + sourcePeer: isSet(object.sourcePeer) + ? Peer.fromJSON(object.sourcePeer) + : undefined, + destinationPeer: isSet(object.destinationPeer) + ? Peer.fromJSON(object.destinationPeer) + : undefined, destinationTableIdentifier: isSet(object.destinationTableIdentifier) ? String(object.destinationTableIdentifier) - : "", - query: isSet(object.query) ? 
String(object.query) : "", - watermarkTable: isSet(object.watermarkTable) ? String(object.watermarkTable) : "", - watermarkColumn: isSet(object.watermarkColumn) ? String(object.watermarkColumn) : "", - initialCopyOnly: isSet(object.initialCopyOnly) ? Boolean(object.initialCopyOnly) : false, - syncMode: isSet(object.syncMode) ? qRepSyncModeFromJSON(object.syncMode) : 0, - batchSizeInt: isSet(object.batchSizeInt) ? Number(object.batchSizeInt) : 0, - batchDurationSeconds: isSet(object.batchDurationSeconds) ? Number(object.batchDurationSeconds) : 0, - maxParallelWorkers: isSet(object.maxParallelWorkers) ? Number(object.maxParallelWorkers) : 0, - waitBetweenBatchesSeconds: isSet(object.waitBetweenBatchesSeconds) ? Number(object.waitBetweenBatchesSeconds) : 0, - writeMode: isSet(object.writeMode) ? QRepWriteMode.fromJSON(object.writeMode) : undefined, - stagingPath: isSet(object.stagingPath) ? String(object.stagingPath) : "", - numRowsPerPartition: isSet(object.numRowsPerPartition) ? Number(object.numRowsPerPartition) : 0, + : '', + query: isSet(object.query) ? String(object.query) : '', + watermarkTable: isSet(object.watermarkTable) + ? String(object.watermarkTable) + : '', + watermarkColumn: isSet(object.watermarkColumn) + ? String(object.watermarkColumn) + : '', + initialCopyOnly: isSet(object.initialCopyOnly) + ? Boolean(object.initialCopyOnly) + : false, + syncMode: isSet(object.syncMode) + ? qRepSyncModeFromJSON(object.syncMode) + : 0, + batchSizeInt: isSet(object.batchSizeInt) + ? Number(object.batchSizeInt) + : 0, + batchDurationSeconds: isSet(object.batchDurationSeconds) + ? Number(object.batchDurationSeconds) + : 0, + maxParallelWorkers: isSet(object.maxParallelWorkers) + ? Number(object.maxParallelWorkers) + : 0, + waitBetweenBatchesSeconds: isSet(object.waitBetweenBatchesSeconds) + ? Number(object.waitBetweenBatchesSeconds) + : 0, + writeMode: isSet(object.writeMode) + ? QRepWriteMode.fromJSON(object.writeMode) + : undefined, + stagingPath: isSet(object.stagingPath) ? String(object.stagingPath) : '', + numRowsPerPartition: isSet(object.numRowsPerPartition) + ? 
Number(object.numRowsPerPartition) + : 0, }; }, toJSON(message: QRepConfig): unknown { const obj: any = {}; - if (message.flowJobName !== "") { + if (message.flowJobName !== '') { obj.flowJobName = message.flowJobName; } if (message.sourcePeer !== undefined) { @@ -4317,16 +5147,16 @@ export const QRepConfig = { if (message.destinationPeer !== undefined) { obj.destinationPeer = Peer.toJSON(message.destinationPeer); } - if (message.destinationTableIdentifier !== "") { + if (message.destinationTableIdentifier !== '') { obj.destinationTableIdentifier = message.destinationTableIdentifier; } - if (message.query !== "") { + if (message.query !== '') { obj.query = message.query; } - if (message.watermarkTable !== "") { + if (message.watermarkTable !== '') { obj.watermarkTable = message.watermarkTable; } - if (message.watermarkColumn !== "") { + if (message.watermarkColumn !== '') { obj.watermarkColumn = message.watermarkColumn; } if (message.initialCopyOnly === true) { @@ -4345,12 +5175,14 @@ export const QRepConfig = { obj.maxParallelWorkers = Math.round(message.maxParallelWorkers); } if (message.waitBetweenBatchesSeconds !== 0) { - obj.waitBetweenBatchesSeconds = Math.round(message.waitBetweenBatchesSeconds); + obj.waitBetweenBatchesSeconds = Math.round( + message.waitBetweenBatchesSeconds + ); } if (message.writeMode !== undefined) { obj.writeMode = QRepWriteMode.toJSON(message.writeMode); } - if (message.stagingPath !== "") { + if (message.stagingPath !== '') { obj.stagingPath = message.stagingPath; } if (message.numRowsPerPartition !== 0) { @@ -4362,41 +5194,50 @@ export const QRepConfig = { create, I>>(base?: I): QRepConfig { return QRepConfig.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): QRepConfig { + fromPartial, I>>( + object: I + ): QRepConfig { const message = createBaseQRepConfig(); - message.flowJobName = object.flowJobName ?? ""; - message.sourcePeer = (object.sourcePeer !== undefined && object.sourcePeer !== null) - ? Peer.fromPartial(object.sourcePeer) - : undefined; - message.destinationPeer = (object.destinationPeer !== undefined && object.destinationPeer !== null) - ? Peer.fromPartial(object.destinationPeer) - : undefined; - message.destinationTableIdentifier = object.destinationTableIdentifier ?? ""; - message.query = object.query ?? ""; - message.watermarkTable = object.watermarkTable ?? ""; - message.watermarkColumn = object.watermarkColumn ?? ""; + message.flowJobName = object.flowJobName ?? ''; + message.sourcePeer = + object.sourcePeer !== undefined && object.sourcePeer !== null + ? Peer.fromPartial(object.sourcePeer) + : undefined; + message.destinationPeer = + object.destinationPeer !== undefined && object.destinationPeer !== null + ? Peer.fromPartial(object.destinationPeer) + : undefined; + message.destinationTableIdentifier = + object.destinationTableIdentifier ?? ''; + message.query = object.query ?? ''; + message.watermarkTable = object.watermarkTable ?? ''; + message.watermarkColumn = object.watermarkColumn ?? ''; message.initialCopyOnly = object.initialCopyOnly ?? false; message.syncMode = object.syncMode ?? 0; message.batchSizeInt = object.batchSizeInt ?? 0; message.batchDurationSeconds = object.batchDurationSeconds ?? 0; message.maxParallelWorkers = object.maxParallelWorkers ?? 0; message.waitBetweenBatchesSeconds = object.waitBetweenBatchesSeconds ?? 0; - message.writeMode = (object.writeMode !== undefined && object.writeMode !== null) - ? 
QRepWriteMode.fromPartial(object.writeMode) - : undefined; - message.stagingPath = object.stagingPath ?? ""; + message.writeMode = + object.writeMode !== undefined && object.writeMode !== null + ? QRepWriteMode.fromPartial(object.writeMode) + : undefined; + message.stagingPath = object.stagingPath ?? ''; message.numRowsPerPartition = object.numRowsPerPartition ?? 0; return message; }, }; function createBaseQRepPartition(): QRepPartition { - return { partitionId: "", range: undefined, fullTablePartition: false }; + return { partitionId: '', range: undefined, fullTablePartition: false }; } export const QRepPartition = { - encode(message: QRepPartition, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.partitionId !== "") { + encode( + message: QRepPartition, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { + if (message.partitionId !== '') { writer.uint32(18).string(message.partitionId); } if (message.range !== undefined) { @@ -4409,7 +5250,8 @@ export const QRepPartition = { }, decode(input: _m0.Reader | Uint8Array, length?: number): QRepPartition { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseQRepPartition(); while (reader.pos < end) { @@ -4447,15 +5289,19 @@ export const QRepPartition = { fromJSON(object: any): QRepPartition { return { - partitionId: isSet(object.partitionId) ? String(object.partitionId) : "", - range: isSet(object.range) ? PartitionRange.fromJSON(object.range) : undefined, - fullTablePartition: isSet(object.fullTablePartition) ? Boolean(object.fullTablePartition) : false, + partitionId: isSet(object.partitionId) ? String(object.partitionId) : '', + range: isSet(object.range) + ? PartitionRange.fromJSON(object.range) + : undefined, + fullTablePartition: isSet(object.fullTablePartition) + ? Boolean(object.fullTablePartition) + : false, }; }, toJSON(message: QRepPartition): unknown { const obj: any = {}; - if (message.partitionId !== "") { + if (message.partitionId !== '') { obj.partitionId = message.partitionId; } if (message.range !== undefined) { @@ -4467,15 +5313,20 @@ export const QRepPartition = { return obj; }, - create, I>>(base?: I): QRepPartition { + create, I>>( + base?: I + ): QRepPartition { return QRepPartition.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): QRepPartition { + fromPartial, I>>( + object: I + ): QRepPartition { const message = createBaseQRepPartition(); - message.partitionId = object.partitionId ?? ""; - message.range = (object.range !== undefined && object.range !== null) - ? PartitionRange.fromPartial(object.range) - : undefined; + message.partitionId = object.partitionId ?? ''; + message.range = + object.range !== undefined && object.range !== null + ? PartitionRange.fromPartial(object.range) + : undefined; message.fullTablePartition = object.fullTablePartition ?? 
false; return message; }, @@ -4486,7 +5337,10 @@ function createBaseQRepPartitionBatch(): QRepPartitionBatch { } export const QRepPartitionBatch = { - encode(message: QRepPartitionBatch, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: QRepPartitionBatch, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.batchId !== 0) { writer.uint32(8).int32(message.batchId); } @@ -4497,7 +5351,8 @@ export const QRepPartitionBatch = { }, decode(input: _m0.Reader | Uint8Array, length?: number): QRepPartitionBatch { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseQRepPartitionBatch(); while (reader.pos < end) { @@ -4515,7 +5370,9 @@ export const QRepPartitionBatch = { break; } - message.partitions.push(QRepPartition.decode(reader, reader.uint32())); + message.partitions.push( + QRepPartition.decode(reader, reader.uint32()) + ); continue; } if ((tag & 7) === 4 || tag === 0) { @@ -4529,7 +5386,9 @@ export const QRepPartitionBatch = { fromJSON(object: any): QRepPartitionBatch { return { batchId: isSet(object.batchId) ? Number(object.batchId) : 0, - partitions: Array.isArray(object?.partitions) ? object.partitions.map((e: any) => QRepPartition.fromJSON(e)) : [], + partitions: Array.isArray(object?.partitions) + ? object.partitions.map((e: any) => QRepPartition.fromJSON(e)) + : [], }; }, @@ -4544,13 +5403,18 @@ export const QRepPartitionBatch = { return obj; }, - create, I>>(base?: I): QRepPartitionBatch { + create, I>>( + base?: I + ): QRepPartitionBatch { return QRepPartitionBatch.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): QRepPartitionBatch { + fromPartial, I>>( + object: I + ): QRepPartitionBatch { const message = createBaseQRepPartitionBatch(); message.batchId = object.batchId ?? 0; - message.partitions = object.partitions?.map((e) => QRepPartition.fromPartial(e)) || []; + message.partitions = + object.partitions?.map((e) => QRepPartition.fromPartial(e)) || []; return message; }, }; @@ -4560,7 +5424,10 @@ function createBaseQRepParitionResult(): QRepParitionResult { } export const QRepParitionResult = { - encode(message: QRepParitionResult, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: QRepParitionResult, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { for (const v of message.partitions) { QRepPartition.encode(v!, writer.uint32(10).fork()).ldelim(); } @@ -4568,7 +5435,8 @@ export const QRepParitionResult = { }, decode(input: _m0.Reader | Uint8Array, length?: number): QRepParitionResult { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseQRepParitionResult(); while (reader.pos < end) { @@ -4579,7 +5447,9 @@ export const QRepParitionResult = { break; } - message.partitions.push(QRepPartition.decode(reader, reader.uint32())); + message.partitions.push( + QRepPartition.decode(reader, reader.uint32()) + ); continue; } if ((tag & 7) === 4 || tag === 0) { @@ -4592,7 +5462,9 @@ export const QRepParitionResult = { fromJSON(object: any): QRepParitionResult { return { - partitions: Array.isArray(object?.partitions) ? 
object.partitions.map((e: any) => QRepPartition.fromJSON(e)) : [], + partitions: Array.isArray(object?.partitions) + ? object.partitions.map((e: any) => QRepPartition.fromJSON(e)) + : [], }; }, @@ -4604,30 +5476,39 @@ export const QRepParitionResult = { return obj; }, - create, I>>(base?: I): QRepParitionResult { + create, I>>( + base?: I + ): QRepParitionResult { return QRepParitionResult.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): QRepParitionResult { + fromPartial, I>>( + object: I + ): QRepParitionResult { const message = createBaseQRepParitionResult(); - message.partitions = object.partitions?.map((e) => QRepPartition.fromPartial(e)) || []; + message.partitions = + object.partitions?.map((e) => QRepPartition.fromPartial(e)) || []; return message; }, }; function createBaseDropFlowInput(): DropFlowInput { - return { flowName: "" }; + return { flowName: '' }; } export const DropFlowInput = { - encode(message: DropFlowInput, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.flowName !== "") { + encode( + message: DropFlowInput, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { + if (message.flowName !== '') { writer.uint32(10).string(message.flowName); } return writer; }, decode(input: _m0.Reader | Uint8Array, length?: number): DropFlowInput { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseDropFlowInput(); while (reader.pos < end) { @@ -4650,23 +5531,27 @@ export const DropFlowInput = { }, fromJSON(object: any): DropFlowInput { - return { flowName: isSet(object.flowName) ? String(object.flowName) : "" }; + return { flowName: isSet(object.flowName) ? String(object.flowName) : '' }; }, toJSON(message: DropFlowInput): unknown { const obj: any = {}; - if (message.flowName !== "") { + if (message.flowName !== '') { obj.flowName = message.flowName; } return obj; }, - create, I>>(base?: I): DropFlowInput { + create, I>>( + base?: I + ): DropFlowInput { return DropFlowInput.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): DropFlowInput { + fromPartial, I>>( + object: I + ): DropFlowInput { const message = createBaseDropFlowInput(); - message.flowName = object.flowName ?? ""; + message.flowName = object.flowName ?? ''; return message; }, }; @@ -4675,31 +5560,46 @@ declare const self: any | undefined; declare const window: any | undefined; declare const global: any | undefined; const tsProtoGlobalThis: any = (() => { - if (typeof globalThis !== "undefined") { + if (typeof globalThis !== 'undefined') { return globalThis; } - if (typeof self !== "undefined") { + if (typeof self !== 'undefined') { return self; } - if (typeof window !== "undefined") { + if (typeof window !== 'undefined') { return window; } - if (typeof global !== "undefined") { + if (typeof global !== 'undefined') { return global; } - throw "Unable to locate global object"; + throw 'Unable to locate global object'; })(); -type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; - -export type DeepPartial = T extends Builtin ? T - : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> - : T extends {} ? { [K in keyof T]?: DeepPartial } +type Builtin = + | Date + | Function + | Uint8Array + | string + | number + | boolean + | undefined; + +export type DeepPartial = T extends Builtin + ? T + : T extends Array + ? 
Array> + : T extends ReadonlyArray + ? ReadonlyArray> + : T extends {} + ? { [K in keyof T]?: DeepPartial } : Partial; type KeysOfUnion = T extends T ? keyof T : never; -export type Exact = P extends Builtin ? P - : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; +export type Exact = P extends Builtin + ? P + : P & { [K in keyof P]: Exact } & { + [K in Exclude>]: never; + }; function toTimestamp(date: Date): Timestamp { const seconds = date.getTime() / 1_000; @@ -4716,7 +5616,7 @@ function fromTimestamp(t: Timestamp): Date { function fromJsonTimestamp(o: any): Date { if (o instanceof Date) { return o; - } else if (typeof o === "string") { + } else if (typeof o === 'string') { return new Date(o); } else { return fromTimestamp(Timestamp.fromJSON(o)); @@ -4725,7 +5625,9 @@ function fromJsonTimestamp(o: any): Date { function longToNumber(long: Long): number { if (long.gt(Number.MAX_SAFE_INTEGER)) { - throw new tsProtoGlobalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + throw new tsProtoGlobalThis.Error( + 'Value is larger than Number.MAX_SAFE_INTEGER' + ); } return long.toNumber(); } @@ -4736,7 +5638,7 @@ if (_m0.util.Long !== Long) { } function isObject(value: any): boolean { - return typeof value === "object" && value !== null; + return typeof value === 'object' && value !== null; } function isSet(value: any): boolean { diff --git a/ui/grpc_generated/google/protobuf/timestamp.ts b/ui/grpc_generated/google/protobuf/timestamp.ts index 560af8a4e0..4743f19380 100644 --- a/ui/grpc_generated/google/protobuf/timestamp.ts +++ b/ui/grpc_generated/google/protobuf/timestamp.ts @@ -1,8 +1,8 @@ /* eslint-disable */ -import Long from "long"; -import _m0 from "protobufjs/minimal"; +import Long from 'long'; +import _m0 from 'protobufjs/minimal'; -export const protobufPackage = "google.protobuf"; +export const protobufPackage = 'google.protobuf'; /** * A Timestamp represents a point in time independent of any time zone or local @@ -116,7 +116,10 @@ function createBaseTimestamp(): Timestamp { } export const Timestamp = { - encode(message: Timestamp, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: Timestamp, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.seconds !== 0) { writer.uint32(8).int64(message.seconds); } @@ -127,7 +130,8 @@ export const Timestamp = { }, decode(input: _m0.Reader | Uint8Array, length?: number): Timestamp { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseTimestamp(); while (reader.pos < end) { @@ -177,7 +181,9 @@ export const Timestamp = { create, I>>(base?: I): Timestamp { return Timestamp.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): Timestamp { + fromPartial, I>>( + object: I + ): Timestamp { const message = createBaseTimestamp(); message.seconds = object.seconds ?? 0; message.nanos = object.nanos ?? 
0; @@ -189,35 +195,52 @@ declare const self: any | undefined; declare const window: any | undefined; declare const global: any | undefined; const tsProtoGlobalThis: any = (() => { - if (typeof globalThis !== "undefined") { + if (typeof globalThis !== 'undefined') { return globalThis; } - if (typeof self !== "undefined") { + if (typeof self !== 'undefined') { return self; } - if (typeof window !== "undefined") { + if (typeof window !== 'undefined') { return window; } - if (typeof global !== "undefined") { + if (typeof global !== 'undefined') { return global; } - throw "Unable to locate global object"; + throw 'Unable to locate global object'; })(); -type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; - -export type DeepPartial = T extends Builtin ? T - : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> - : T extends {} ? { [K in keyof T]?: DeepPartial } +type Builtin = + | Date + | Function + | Uint8Array + | string + | number + | boolean + | undefined; + +export type DeepPartial = T extends Builtin + ? T + : T extends Array + ? Array> + : T extends ReadonlyArray + ? ReadonlyArray> + : T extends {} + ? { [K in keyof T]?: DeepPartial } : Partial; type KeysOfUnion = T extends T ? keyof T : never; -export type Exact = P extends Builtin ? P - : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; +export type Exact = P extends Builtin + ? P + : P & { [K in keyof P]: Exact } & { + [K in Exclude>]: never; + }; function longToNumber(long: Long): number { if (long.gt(Number.MAX_SAFE_INTEGER)) { - throw new tsProtoGlobalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + throw new tsProtoGlobalThis.Error( + 'Value is larger than Number.MAX_SAFE_INTEGER' + ); } return long.toNumber(); } diff --git a/ui/grpc_generated/peers.ts b/ui/grpc_generated/peers.ts index acab108c34..d54289c1d4 100644 --- a/ui/grpc_generated/peers.ts +++ b/ui/grpc_generated/peers.ts @@ -1,8 +1,8 @@ /* eslint-disable */ -import Long from "long"; -import _m0 from "protobufjs/minimal"; +import Long from 'long'; +import _m0 from 'protobufjs/minimal'; -export const protobufPackage = "peerdb_peers"; +export const protobufPackage = 'peerdb_peers'; export enum DBType { BIGQUERY = 0, @@ -18,28 +18,28 @@ export enum DBType { export function dBTypeFromJSON(object: any): DBType { switch (object) { case 0: - case "BIGQUERY": + case 'BIGQUERY': return DBType.BIGQUERY; case 1: - case "SNOWFLAKE": + case 'SNOWFLAKE': return DBType.SNOWFLAKE; case 2: - case "MONGO": + case 'MONGO': return DBType.MONGO; case 3: - case "POSTGRES": + case 'POSTGRES': return DBType.POSTGRES; case 4: - case "EVENTHUB": + case 'EVENTHUB': return DBType.EVENTHUB; case 5: - case "S3": + case 'S3': return DBType.S3; case 6: - case "SQLSERVER": + case 'SQLSERVER': return DBType.SQLSERVER; case -1: - case "UNRECOGNIZED": + case 'UNRECOGNIZED': default: return DBType.UNRECOGNIZED; } @@ -48,22 +48,22 @@ export function dBTypeFromJSON(object: any): DBType { export function dBTypeToJSON(object: DBType): string { switch (object) { case DBType.BIGQUERY: - return "BIGQUERY"; + return 'BIGQUERY'; case DBType.SNOWFLAKE: - return "SNOWFLAKE"; + return 'SNOWFLAKE'; case DBType.MONGO: - return "MONGO"; + return 'MONGO'; case DBType.POSTGRES: - return "POSTGRES"; + return 'POSTGRES'; case DBType.EVENTHUB: - return "EVENTHUB"; + return 'EVENTHUB'; case DBType.S3: - return "S3"; + return 'S3'; case DBType.SQLSERVER: - return "SQLSERVER"; + return 'SQLSERVER'; case DBType.UNRECOGNIZED: default: - return "UNRECOGNIZED"; + 
return 'UNRECOGNIZED'; } } @@ -143,48 +143,52 @@ export interface Peer { function createBaseSnowflakeConfig(): SnowflakeConfig { return { - accountId: "", - username: "", - privateKey: "", - database: "", - warehouse: "", - role: "", + accountId: '', + username: '', + privateKey: '', + database: '', + warehouse: '', + role: '', queryTimeout: 0, - s3Integration: "", + s3Integration: '', }; } export const SnowflakeConfig = { - encode(message: SnowflakeConfig, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.accountId !== "") { + encode( + message: SnowflakeConfig, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { + if (message.accountId !== '') { writer.uint32(10).string(message.accountId); } - if (message.username !== "") { + if (message.username !== '') { writer.uint32(18).string(message.username); } - if (message.privateKey !== "") { + if (message.privateKey !== '') { writer.uint32(26).string(message.privateKey); } - if (message.database !== "") { + if (message.database !== '') { writer.uint32(34).string(message.database); } - if (message.warehouse !== "") { + if (message.warehouse !== '') { writer.uint32(50).string(message.warehouse); } - if (message.role !== "") { + if (message.role !== '') { writer.uint32(58).string(message.role); } if (message.queryTimeout !== 0) { writer.uint32(64).uint64(message.queryTimeout); } - if (message.s3Integration !== "") { + if (message.s3Integration !== '') { writer.uint32(74).string(message.s3Integration); } return writer; }, decode(input: _m0.Reader | Uint8Array, length?: number): SnowflakeConfig { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseSnowflakeConfig(); while (reader.pos < end) { @@ -257,119 +261,131 @@ export const SnowflakeConfig = { fromJSON(object: any): SnowflakeConfig { return { - accountId: isSet(object.accountId) ? String(object.accountId) : "", - username: isSet(object.username) ? String(object.username) : "", - privateKey: isSet(object.privateKey) ? String(object.privateKey) : "", - database: isSet(object.database) ? String(object.database) : "", - warehouse: isSet(object.warehouse) ? String(object.warehouse) : "", - role: isSet(object.role) ? String(object.role) : "", - queryTimeout: isSet(object.queryTimeout) ? Number(object.queryTimeout) : 0, - s3Integration: isSet(object.s3Integration) ? String(object.s3Integration) : "", + accountId: isSet(object.accountId) ? String(object.accountId) : '', + username: isSet(object.username) ? String(object.username) : '', + privateKey: isSet(object.privateKey) ? String(object.privateKey) : '', + database: isSet(object.database) ? String(object.database) : '', + warehouse: isSet(object.warehouse) ? String(object.warehouse) : '', + role: isSet(object.role) ? String(object.role) : '', + queryTimeout: isSet(object.queryTimeout) + ? Number(object.queryTimeout) + : 0, + s3Integration: isSet(object.s3Integration) + ? 
String(object.s3Integration) + : '', }; }, toJSON(message: SnowflakeConfig): unknown { const obj: any = {}; - if (message.accountId !== "") { + if (message.accountId !== '') { obj.accountId = message.accountId; } - if (message.username !== "") { + if (message.username !== '') { obj.username = message.username; } - if (message.privateKey !== "") { + if (message.privateKey !== '') { obj.privateKey = message.privateKey; } - if (message.database !== "") { + if (message.database !== '') { obj.database = message.database; } - if (message.warehouse !== "") { + if (message.warehouse !== '') { obj.warehouse = message.warehouse; } - if (message.role !== "") { + if (message.role !== '') { obj.role = message.role; } if (message.queryTimeout !== 0) { obj.queryTimeout = Math.round(message.queryTimeout); } - if (message.s3Integration !== "") { + if (message.s3Integration !== '') { obj.s3Integration = message.s3Integration; } return obj; }, - create, I>>(base?: I): SnowflakeConfig { + create, I>>( + base?: I + ): SnowflakeConfig { return SnowflakeConfig.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): SnowflakeConfig { + fromPartial, I>>( + object: I + ): SnowflakeConfig { const message = createBaseSnowflakeConfig(); - message.accountId = object.accountId ?? ""; - message.username = object.username ?? ""; - message.privateKey = object.privateKey ?? ""; - message.database = object.database ?? ""; - message.warehouse = object.warehouse ?? ""; - message.role = object.role ?? ""; + message.accountId = object.accountId ?? ''; + message.username = object.username ?? ''; + message.privateKey = object.privateKey ?? ''; + message.database = object.database ?? ''; + message.warehouse = object.warehouse ?? ''; + message.role = object.role ?? ''; message.queryTimeout = object.queryTimeout ?? 0; - message.s3Integration = object.s3Integration ?? ""; + message.s3Integration = object.s3Integration ?? 
''; return message; }, }; function createBaseBigqueryConfig(): BigqueryConfig { return { - authType: "", - projectId: "", - privateKeyId: "", - privateKey: "", - clientEmail: "", - clientId: "", - authUri: "", - tokenUri: "", - authProviderX509CertUrl: "", - clientX509CertUrl: "", - datasetId: "", + authType: '', + projectId: '', + privateKeyId: '', + privateKey: '', + clientEmail: '', + clientId: '', + authUri: '', + tokenUri: '', + authProviderX509CertUrl: '', + clientX509CertUrl: '', + datasetId: '', }; } export const BigqueryConfig = { - encode(message: BigqueryConfig, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.authType !== "") { + encode( + message: BigqueryConfig, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { + if (message.authType !== '') { writer.uint32(10).string(message.authType); } - if (message.projectId !== "") { + if (message.projectId !== '') { writer.uint32(18).string(message.projectId); } - if (message.privateKeyId !== "") { + if (message.privateKeyId !== '') { writer.uint32(26).string(message.privateKeyId); } - if (message.privateKey !== "") { + if (message.privateKey !== '') { writer.uint32(34).string(message.privateKey); } - if (message.clientEmail !== "") { + if (message.clientEmail !== '') { writer.uint32(42).string(message.clientEmail); } - if (message.clientId !== "") { + if (message.clientId !== '') { writer.uint32(50).string(message.clientId); } - if (message.authUri !== "") { + if (message.authUri !== '') { writer.uint32(58).string(message.authUri); } - if (message.tokenUri !== "") { + if (message.tokenUri !== '') { writer.uint32(66).string(message.tokenUri); } - if (message.authProviderX509CertUrl !== "") { + if (message.authProviderX509CertUrl !== '') { writer.uint32(74).string(message.authProviderX509CertUrl); } - if (message.clientX509CertUrl !== "") { + if (message.clientX509CertUrl !== '') { writer.uint32(82).string(message.clientX509CertUrl); } - if (message.datasetId !== "") { + if (message.datasetId !== '') { writer.uint32(90).string(message.datasetId); } return writer; }, decode(input: _m0.Reader | Uint8Array, length?: number): BigqueryConfig { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseBigqueryConfig(); while (reader.pos < end) { @@ -463,104 +479,124 @@ export const BigqueryConfig = { fromJSON(object: any): BigqueryConfig { return { - authType: isSet(object.authType) ? String(object.authType) : "", - projectId: isSet(object.projectId) ? String(object.projectId) : "", - privateKeyId: isSet(object.privateKeyId) ? String(object.privateKeyId) : "", - privateKey: isSet(object.privateKey) ? String(object.privateKey) : "", - clientEmail: isSet(object.clientEmail) ? String(object.clientEmail) : "", - clientId: isSet(object.clientId) ? String(object.clientId) : "", - authUri: isSet(object.authUri) ? String(object.authUri) : "", - tokenUri: isSet(object.tokenUri) ? String(object.tokenUri) : "", - authProviderX509CertUrl: isSet(object.authProviderX509CertUrl) ? String(object.authProviderX509CertUrl) : "", - clientX509CertUrl: isSet(object.clientX509CertUrl) ? String(object.clientX509CertUrl) : "", - datasetId: isSet(object.datasetId) ? String(object.datasetId) : "", + authType: isSet(object.authType) ? String(object.authType) : '', + projectId: isSet(object.projectId) ? 
String(object.projectId) : '', + privateKeyId: isSet(object.privateKeyId) + ? String(object.privateKeyId) + : '', + privateKey: isSet(object.privateKey) ? String(object.privateKey) : '', + clientEmail: isSet(object.clientEmail) ? String(object.clientEmail) : '', + clientId: isSet(object.clientId) ? String(object.clientId) : '', + authUri: isSet(object.authUri) ? String(object.authUri) : '', + tokenUri: isSet(object.tokenUri) ? String(object.tokenUri) : '', + authProviderX509CertUrl: isSet(object.authProviderX509CertUrl) + ? String(object.authProviderX509CertUrl) + : '', + clientX509CertUrl: isSet(object.clientX509CertUrl) + ? String(object.clientX509CertUrl) + : '', + datasetId: isSet(object.datasetId) ? String(object.datasetId) : '', }; }, toJSON(message: BigqueryConfig): unknown { const obj: any = {}; - if (message.authType !== "") { + if (message.authType !== '') { obj.authType = message.authType; } - if (message.projectId !== "") { + if (message.projectId !== '') { obj.projectId = message.projectId; } - if (message.privateKeyId !== "") { + if (message.privateKeyId !== '') { obj.privateKeyId = message.privateKeyId; } - if (message.privateKey !== "") { + if (message.privateKey !== '') { obj.privateKey = message.privateKey; } - if (message.clientEmail !== "") { + if (message.clientEmail !== '') { obj.clientEmail = message.clientEmail; } - if (message.clientId !== "") { + if (message.clientId !== '') { obj.clientId = message.clientId; } - if (message.authUri !== "") { + if (message.authUri !== '') { obj.authUri = message.authUri; } - if (message.tokenUri !== "") { + if (message.tokenUri !== '') { obj.tokenUri = message.tokenUri; } - if (message.authProviderX509CertUrl !== "") { + if (message.authProviderX509CertUrl !== '') { obj.authProviderX509CertUrl = message.authProviderX509CertUrl; } - if (message.clientX509CertUrl !== "") { + if (message.clientX509CertUrl !== '') { obj.clientX509CertUrl = message.clientX509CertUrl; } - if (message.datasetId !== "") { + if (message.datasetId !== '') { obj.datasetId = message.datasetId; } return obj; }, - create, I>>(base?: I): BigqueryConfig { + create, I>>( + base?: I + ): BigqueryConfig { return BigqueryConfig.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): BigqueryConfig { + fromPartial, I>>( + object: I + ): BigqueryConfig { const message = createBaseBigqueryConfig(); - message.authType = object.authType ?? ""; - message.projectId = object.projectId ?? ""; - message.privateKeyId = object.privateKeyId ?? ""; - message.privateKey = object.privateKey ?? ""; - message.clientEmail = object.clientEmail ?? ""; - message.clientId = object.clientId ?? ""; - message.authUri = object.authUri ?? ""; - message.tokenUri = object.tokenUri ?? ""; - message.authProviderX509CertUrl = object.authProviderX509CertUrl ?? ""; - message.clientX509CertUrl = object.clientX509CertUrl ?? ""; - message.datasetId = object.datasetId ?? ""; + message.authType = object.authType ?? ''; + message.projectId = object.projectId ?? ''; + message.privateKeyId = object.privateKeyId ?? ''; + message.privateKey = object.privateKey ?? ''; + message.clientEmail = object.clientEmail ?? ''; + message.clientId = object.clientId ?? ''; + message.authUri = object.authUri ?? ''; + message.tokenUri = object.tokenUri ?? ''; + message.authProviderX509CertUrl = object.authProviderX509CertUrl ?? ''; + message.clientX509CertUrl = object.clientX509CertUrl ?? ''; + message.datasetId = object.datasetId ?? 
''; return message; }, }; function createBaseMongoConfig(): MongoConfig { - return { username: "", password: "", clusterurl: "", clusterport: 0, database: "" }; + return { + username: '', + password: '', + clusterurl: '', + clusterport: 0, + database: '', + }; } export const MongoConfig = { - encode(message: MongoConfig, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.username !== "") { + encode( + message: MongoConfig, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { + if (message.username !== '') { writer.uint32(10).string(message.username); } - if (message.password !== "") { + if (message.password !== '') { writer.uint32(18).string(message.password); } - if (message.clusterurl !== "") { + if (message.clusterurl !== '') { writer.uint32(26).string(message.clusterurl); } if (message.clusterport !== 0) { writer.uint32(32).int32(message.clusterport); } - if (message.database !== "") { + if (message.database !== '') { writer.uint32(42).string(message.database); } return writer; }, decode(input: _m0.Reader | Uint8Array, length?: number): MongoConfig { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseMongoConfig(); while (reader.pos < end) { @@ -612,29 +648,29 @@ export const MongoConfig = { fromJSON(object: any): MongoConfig { return { - username: isSet(object.username) ? String(object.username) : "", - password: isSet(object.password) ? String(object.password) : "", - clusterurl: isSet(object.clusterurl) ? String(object.clusterurl) : "", + username: isSet(object.username) ? String(object.username) : '', + password: isSet(object.password) ? String(object.password) : '', + clusterurl: isSet(object.clusterurl) ? String(object.clusterurl) : '', clusterport: isSet(object.clusterport) ? Number(object.clusterport) : 0, - database: isSet(object.database) ? String(object.database) : "", + database: isSet(object.database) ? String(object.database) : '', }; }, toJSON(message: MongoConfig): unknown { const obj: any = {}; - if (message.username !== "") { + if (message.username !== '') { obj.username = message.username; } - if (message.password !== "") { + if (message.password !== '') { obj.password = message.password; } - if (message.clusterurl !== "") { + if (message.clusterurl !== '') { obj.clusterurl = message.clusterurl; } if (message.clusterport !== 0) { obj.clusterport = Math.round(message.clusterport); } - if (message.database !== "") { + if (message.database !== '') { obj.database = message.database; } return obj; @@ -643,46 +679,59 @@ export const MongoConfig = { create, I>>(base?: I): MongoConfig { return MongoConfig.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): MongoConfig { + fromPartial, I>>( + object: I + ): MongoConfig { const message = createBaseMongoConfig(); - message.username = object.username ?? ""; - message.password = object.password ?? ""; - message.clusterurl = object.clusterurl ?? ""; + message.username = object.username ?? ''; + message.password = object.password ?? ''; + message.clusterurl = object.clusterurl ?? ''; message.clusterport = object.clusterport ?? 0; - message.database = object.database ?? ""; + message.database = object.database ?? 
''; return message; }, }; function createBasePostgresConfig(): PostgresConfig { - return { host: "", port: 0, user: "", password: "", database: "", transactionSnapshot: "" }; + return { + host: '', + port: 0, + user: '', + password: '', + database: '', + transactionSnapshot: '', + }; } export const PostgresConfig = { - encode(message: PostgresConfig, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.host !== "") { + encode( + message: PostgresConfig, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { + if (message.host !== '') { writer.uint32(10).string(message.host); } if (message.port !== 0) { writer.uint32(16).uint32(message.port); } - if (message.user !== "") { + if (message.user !== '') { writer.uint32(26).string(message.user); } - if (message.password !== "") { + if (message.password !== '') { writer.uint32(34).string(message.password); } - if (message.database !== "") { + if (message.database !== '') { writer.uint32(42).string(message.database); } - if (message.transactionSnapshot !== "") { + if (message.transactionSnapshot !== '') { writer.uint32(50).string(message.transactionSnapshot); } return writer; }, decode(input: _m0.Reader | Uint8Array, length?: number): PostgresConfig { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBasePostgresConfig(); while (reader.pos < end) { @@ -741,76 +790,94 @@ export const PostgresConfig = { fromJSON(object: any): PostgresConfig { return { - host: isSet(object.host) ? String(object.host) : "", + host: isSet(object.host) ? String(object.host) : '', port: isSet(object.port) ? Number(object.port) : 0, - user: isSet(object.user) ? String(object.user) : "", - password: isSet(object.password) ? String(object.password) : "", - database: isSet(object.database) ? String(object.database) : "", - transactionSnapshot: isSet(object.transactionSnapshot) ? String(object.transactionSnapshot) : "", + user: isSet(object.user) ? String(object.user) : '', + password: isSet(object.password) ? String(object.password) : '', + database: isSet(object.database) ? String(object.database) : '', + transactionSnapshot: isSet(object.transactionSnapshot) + ? String(object.transactionSnapshot) + : '', }; }, toJSON(message: PostgresConfig): unknown { const obj: any = {}; - if (message.host !== "") { + if (message.host !== '') { obj.host = message.host; } if (message.port !== 0) { obj.port = Math.round(message.port); } - if (message.user !== "") { + if (message.user !== '') { obj.user = message.user; } - if (message.password !== "") { + if (message.password !== '') { obj.password = message.password; } - if (message.database !== "") { + if (message.database !== '') { obj.database = message.database; } - if (message.transactionSnapshot !== "") { + if (message.transactionSnapshot !== '') { obj.transactionSnapshot = message.transactionSnapshot; } return obj; }, - create, I>>(base?: I): PostgresConfig { + create, I>>( + base?: I + ): PostgresConfig { return PostgresConfig.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): PostgresConfig { + fromPartial, I>>( + object: I + ): PostgresConfig { const message = createBasePostgresConfig(); - message.host = object.host ?? ""; + message.host = object.host ?? ''; message.port = object.port ?? 0; - message.user = object.user ?? ""; - message.password = object.password ?? 
""; - message.database = object.database ?? ""; - message.transactionSnapshot = object.transactionSnapshot ?? ""; + message.user = object.user ?? ''; + message.password = object.password ?? ''; + message.database = object.database ?? ''; + message.transactionSnapshot = object.transactionSnapshot ?? ''; return message; }, }; function createBaseEventHubConfig(): EventHubConfig { - return { namespace: "", resourceGroup: "", location: "", metadataDb: undefined }; + return { + namespace: '', + resourceGroup: '', + location: '', + metadataDb: undefined, + }; } export const EventHubConfig = { - encode(message: EventHubConfig, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.namespace !== "") { + encode( + message: EventHubConfig, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { + if (message.namespace !== '') { writer.uint32(10).string(message.namespace); } - if (message.resourceGroup !== "") { + if (message.resourceGroup !== '') { writer.uint32(18).string(message.resourceGroup); } - if (message.location !== "") { + if (message.location !== '') { writer.uint32(26).string(message.location); } if (message.metadataDb !== undefined) { - PostgresConfig.encode(message.metadataDb, writer.uint32(34).fork()).ldelim(); + PostgresConfig.encode( + message.metadataDb, + writer.uint32(34).fork() + ).ldelim(); } return writer; }, decode(input: _m0.Reader | Uint8Array, length?: number): EventHubConfig { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseEventHubConfig(); while (reader.pos < end) { @@ -855,22 +922,26 @@ export const EventHubConfig = { fromJSON(object: any): EventHubConfig { return { - namespace: isSet(object.namespace) ? String(object.namespace) : "", - resourceGroup: isSet(object.resourceGroup) ? String(object.resourceGroup) : "", - location: isSet(object.location) ? String(object.location) : "", - metadataDb: isSet(object.metadataDb) ? PostgresConfig.fromJSON(object.metadataDb) : undefined, + namespace: isSet(object.namespace) ? String(object.namespace) : '', + resourceGroup: isSet(object.resourceGroup) + ? String(object.resourceGroup) + : '', + location: isSet(object.location) ? String(object.location) : '', + metadataDb: isSet(object.metadataDb) + ? PostgresConfig.fromJSON(object.metadataDb) + : undefined, }; }, toJSON(message: EventHubConfig): unknown { const obj: any = {}; - if (message.namespace !== "") { + if (message.namespace !== '') { obj.namespace = message.namespace; } - if (message.resourceGroup !== "") { + if (message.resourceGroup !== '') { obj.resourceGroup = message.resourceGroup; } - if (message.location !== "") { + if (message.location !== '') { obj.location = message.location; } if (message.metadataDb !== undefined) { @@ -879,35 +950,44 @@ export const EventHubConfig = { return obj; }, - create, I>>(base?: I): EventHubConfig { + create, I>>( + base?: I + ): EventHubConfig { return EventHubConfig.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): EventHubConfig { + fromPartial, I>>( + object: I + ): EventHubConfig { const message = createBaseEventHubConfig(); - message.namespace = object.namespace ?? ""; - message.resourceGroup = object.resourceGroup ?? ""; - message.location = object.location ?? ""; - message.metadataDb = (object.metadataDb !== undefined && object.metadataDb !== null) - ? 
PostgresConfig.fromPartial(object.metadataDb) - : undefined; + message.namespace = object.namespace ?? ''; + message.resourceGroup = object.resourceGroup ?? ''; + message.location = object.location ?? ''; + message.metadataDb = + object.metadataDb !== undefined && object.metadataDb !== null + ? PostgresConfig.fromPartial(object.metadataDb) + : undefined; return message; }, }; function createBaseS3Config(): S3Config { - return { url: "" }; + return { url: '' }; } export const S3Config = { - encode(message: S3Config, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.url !== "") { + encode( + message: S3Config, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { + if (message.url !== '') { writer.uint32(10).string(message.url); } return writer; }, decode(input: _m0.Reader | Uint8Array, length?: number): S3Config { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseS3Config(); while (reader.pos < end) { @@ -930,12 +1010,12 @@ export const S3Config = { }, fromJSON(object: any): S3Config { - return { url: isSet(object.url) ? String(object.url) : "" }; + return { url: isSet(object.url) ? String(object.url) : '' }; }, toJSON(message: S3Config): unknown { const obj: any = {}; - if (message.url !== "") { + if (message.url !== '') { obj.url = message.url; } return obj; @@ -946,37 +1026,41 @@ export const S3Config = { }, fromPartial, I>>(object: I): S3Config { const message = createBaseS3Config(); - message.url = object.url ?? ""; + message.url = object.url ?? ''; return message; }, }; function createBaseSqlServerConfig(): SqlServerConfig { - return { server: "", port: 0, user: "", password: "", database: "" }; + return { server: '', port: 0, user: '', password: '', database: '' }; } export const SqlServerConfig = { - encode(message: SqlServerConfig, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.server !== "") { + encode( + message: SqlServerConfig, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { + if (message.server !== '') { writer.uint32(10).string(message.server); } if (message.port !== 0) { writer.uint32(16).uint32(message.port); } - if (message.user !== "") { + if (message.user !== '') { writer.uint32(26).string(message.user); } - if (message.password !== "") { + if (message.password !== '') { writer.uint32(34).string(message.password); } - if (message.database !== "") { + if (message.database !== '') { writer.uint32(42).string(message.database); } return writer; }, decode(input: _m0.Reader | Uint8Array, length?: number): SqlServerConfig { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseSqlServerConfig(); while (reader.pos < end) { @@ -1028,51 +1112,55 @@ export const SqlServerConfig = { fromJSON(object: any): SqlServerConfig { return { - server: isSet(object.server) ? String(object.server) : "", + server: isSet(object.server) ? String(object.server) : '', port: isSet(object.port) ? Number(object.port) : 0, - user: isSet(object.user) ? String(object.user) : "", - password: isSet(object.password) ? String(object.password) : "", - database: isSet(object.database) ? String(object.database) : "", + user: isSet(object.user) ? 
String(object.user) : '', + password: isSet(object.password) ? String(object.password) : '', + database: isSet(object.database) ? String(object.database) : '', }; }, toJSON(message: SqlServerConfig): unknown { const obj: any = {}; - if (message.server !== "") { + if (message.server !== '') { obj.server = message.server; } if (message.port !== 0) { obj.port = Math.round(message.port); } - if (message.user !== "") { + if (message.user !== '') { obj.user = message.user; } - if (message.password !== "") { + if (message.password !== '') { obj.password = message.password; } - if (message.database !== "") { + if (message.database !== '') { obj.database = message.database; } return obj; }, - create, I>>(base?: I): SqlServerConfig { + create, I>>( + base?: I + ): SqlServerConfig { return SqlServerConfig.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): SqlServerConfig { + fromPartial, I>>( + object: I + ): SqlServerConfig { const message = createBaseSqlServerConfig(); - message.server = object.server ?? ""; + message.server = object.server ?? ''; message.port = object.port ?? 0; - message.user = object.user ?? ""; - message.password = object.password ?? ""; - message.database = object.database ?? ""; + message.user = object.user ?? ''; + message.password = object.password ?? ''; + message.database = object.database ?? ''; return message; }, }; function createBasePeer(): Peer { return { - name: "", + name: '', type: 0, snowflakeConfig: undefined, bigqueryConfig: undefined, @@ -1086,38 +1174,57 @@ function createBasePeer(): Peer { export const Peer = { encode(message: Peer, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.name !== "") { + if (message.name !== '') { writer.uint32(10).string(message.name); } if (message.type !== 0) { writer.uint32(16).int32(message.type); } if (message.snowflakeConfig !== undefined) { - SnowflakeConfig.encode(message.snowflakeConfig, writer.uint32(26).fork()).ldelim(); + SnowflakeConfig.encode( + message.snowflakeConfig, + writer.uint32(26).fork() + ).ldelim(); } if (message.bigqueryConfig !== undefined) { - BigqueryConfig.encode(message.bigqueryConfig, writer.uint32(34).fork()).ldelim(); + BigqueryConfig.encode( + message.bigqueryConfig, + writer.uint32(34).fork() + ).ldelim(); } if (message.mongoConfig !== undefined) { - MongoConfig.encode(message.mongoConfig, writer.uint32(42).fork()).ldelim(); + MongoConfig.encode( + message.mongoConfig, + writer.uint32(42).fork() + ).ldelim(); } if (message.postgresConfig !== undefined) { - PostgresConfig.encode(message.postgresConfig, writer.uint32(50).fork()).ldelim(); + PostgresConfig.encode( + message.postgresConfig, + writer.uint32(50).fork() + ).ldelim(); } if (message.eventhubConfig !== undefined) { - EventHubConfig.encode(message.eventhubConfig, writer.uint32(58).fork()).ldelim(); + EventHubConfig.encode( + message.eventhubConfig, + writer.uint32(58).fork() + ).ldelim(); } if (message.s3Config !== undefined) { S3Config.encode(message.s3Config, writer.uint32(66).fork()).ldelim(); } if (message.sqlserverConfig !== undefined) { - SqlServerConfig.encode(message.sqlserverConfig, writer.uint32(74).fork()).ldelim(); + SqlServerConfig.encode( + message.sqlserverConfig, + writer.uint32(74).fork() + ).ldelim(); } return writer; }, decode(input: _m0.Reader | Uint8Array, length?: number): Peer { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? 
reader.len : reader.pos + length; const message = createBasePeer(); while (reader.pos < end) { @@ -1142,14 +1249,20 @@ export const Peer = { break; } - message.snowflakeConfig = SnowflakeConfig.decode(reader, reader.uint32()); + message.snowflakeConfig = SnowflakeConfig.decode( + reader, + reader.uint32() + ); continue; case 4: if (tag !== 34) { break; } - message.bigqueryConfig = BigqueryConfig.decode(reader, reader.uint32()); + message.bigqueryConfig = BigqueryConfig.decode( + reader, + reader.uint32() + ); continue; case 5: if (tag !== 42) { @@ -1163,14 +1276,20 @@ export const Peer = { break; } - message.postgresConfig = PostgresConfig.decode(reader, reader.uint32()); + message.postgresConfig = PostgresConfig.decode( + reader, + reader.uint32() + ); continue; case 7: if (tag !== 58) { break; } - message.eventhubConfig = EventHubConfig.decode(reader, reader.uint32()); + message.eventhubConfig = EventHubConfig.decode( + reader, + reader.uint32() + ); continue; case 8: if (tag !== 66) { @@ -1184,7 +1303,10 @@ export const Peer = { break; } - message.sqlserverConfig = SqlServerConfig.decode(reader, reader.uint32()); + message.sqlserverConfig = SqlServerConfig.decode( + reader, + reader.uint32() + ); continue; } if ((tag & 7) === 4 || tag === 0) { @@ -1197,21 +1319,35 @@ export const Peer = { fromJSON(object: any): Peer { return { - name: isSet(object.name) ? String(object.name) : "", + name: isSet(object.name) ? String(object.name) : '', type: isSet(object.type) ? dBTypeFromJSON(object.type) : 0, - snowflakeConfig: isSet(object.snowflakeConfig) ? SnowflakeConfig.fromJSON(object.snowflakeConfig) : undefined, - bigqueryConfig: isSet(object.bigqueryConfig) ? BigqueryConfig.fromJSON(object.bigqueryConfig) : undefined, - mongoConfig: isSet(object.mongoConfig) ? MongoConfig.fromJSON(object.mongoConfig) : undefined, - postgresConfig: isSet(object.postgresConfig) ? PostgresConfig.fromJSON(object.postgresConfig) : undefined, - eventhubConfig: isSet(object.eventhubConfig) ? EventHubConfig.fromJSON(object.eventhubConfig) : undefined, - s3Config: isSet(object.s3Config) ? S3Config.fromJSON(object.s3Config) : undefined, - sqlserverConfig: isSet(object.sqlserverConfig) ? SqlServerConfig.fromJSON(object.sqlserverConfig) : undefined, + snowflakeConfig: isSet(object.snowflakeConfig) + ? SnowflakeConfig.fromJSON(object.snowflakeConfig) + : undefined, + bigqueryConfig: isSet(object.bigqueryConfig) + ? BigqueryConfig.fromJSON(object.bigqueryConfig) + : undefined, + mongoConfig: isSet(object.mongoConfig) + ? MongoConfig.fromJSON(object.mongoConfig) + : undefined, + postgresConfig: isSet(object.postgresConfig) + ? PostgresConfig.fromJSON(object.postgresConfig) + : undefined, + eventhubConfig: isSet(object.eventhubConfig) + ? EventHubConfig.fromJSON(object.eventhubConfig) + : undefined, + s3Config: isSet(object.s3Config) + ? S3Config.fromJSON(object.s3Config) + : undefined, + sqlserverConfig: isSet(object.sqlserverConfig) + ? SqlServerConfig.fromJSON(object.sqlserverConfig) + : undefined, }; }, toJSON(message: Peer): unknown { const obj: any = {}; - if (message.name !== "") { + if (message.name !== '') { obj.name = message.name; } if (message.type !== 0) { @@ -1246,29 +1382,36 @@ export const Peer = { }, fromPartial, I>>(object: I): Peer { const message = createBasePeer(); - message.name = object.name ?? ""; + message.name = object.name ?? ''; message.type = object.type ?? 0; - message.snowflakeConfig = (object.snowflakeConfig !== undefined && object.snowflakeConfig !== null) - ? 
SnowflakeConfig.fromPartial(object.snowflakeConfig) - : undefined; - message.bigqueryConfig = (object.bigqueryConfig !== undefined && object.bigqueryConfig !== null) - ? BigqueryConfig.fromPartial(object.bigqueryConfig) - : undefined; - message.mongoConfig = (object.mongoConfig !== undefined && object.mongoConfig !== null) - ? MongoConfig.fromPartial(object.mongoConfig) - : undefined; - message.postgresConfig = (object.postgresConfig !== undefined && object.postgresConfig !== null) - ? PostgresConfig.fromPartial(object.postgresConfig) - : undefined; - message.eventhubConfig = (object.eventhubConfig !== undefined && object.eventhubConfig !== null) - ? EventHubConfig.fromPartial(object.eventhubConfig) - : undefined; - message.s3Config = (object.s3Config !== undefined && object.s3Config !== null) - ? S3Config.fromPartial(object.s3Config) - : undefined; - message.sqlserverConfig = (object.sqlserverConfig !== undefined && object.sqlserverConfig !== null) - ? SqlServerConfig.fromPartial(object.sqlserverConfig) - : undefined; + message.snowflakeConfig = + object.snowflakeConfig !== undefined && object.snowflakeConfig !== null + ? SnowflakeConfig.fromPartial(object.snowflakeConfig) + : undefined; + message.bigqueryConfig = + object.bigqueryConfig !== undefined && object.bigqueryConfig !== null + ? BigqueryConfig.fromPartial(object.bigqueryConfig) + : undefined; + message.mongoConfig = + object.mongoConfig !== undefined && object.mongoConfig !== null + ? MongoConfig.fromPartial(object.mongoConfig) + : undefined; + message.postgresConfig = + object.postgresConfig !== undefined && object.postgresConfig !== null + ? PostgresConfig.fromPartial(object.postgresConfig) + : undefined; + message.eventhubConfig = + object.eventhubConfig !== undefined && object.eventhubConfig !== null + ? EventHubConfig.fromPartial(object.eventhubConfig) + : undefined; + message.s3Config = + object.s3Config !== undefined && object.s3Config !== null + ? S3Config.fromPartial(object.s3Config) + : undefined; + message.sqlserverConfig = + object.sqlserverConfig !== undefined && object.sqlserverConfig !== null + ? SqlServerConfig.fromPartial(object.sqlserverConfig) + : undefined; return message; }, }; @@ -1277,35 +1420,52 @@ declare const self: any | undefined; declare const window: any | undefined; declare const global: any | undefined; const tsProtoGlobalThis: any = (() => { - if (typeof globalThis !== "undefined") { + if (typeof globalThis !== 'undefined') { return globalThis; } - if (typeof self !== "undefined") { + if (typeof self !== 'undefined') { return self; } - if (typeof window !== "undefined") { + if (typeof window !== 'undefined') { return window; } - if (typeof global !== "undefined") { + if (typeof global !== 'undefined') { return global; } - throw "Unable to locate global object"; + throw 'Unable to locate global object'; })(); -type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; - -export type DeepPartial = T extends Builtin ? T - : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> - : T extends {} ? { [K in keyof T]?: DeepPartial } +type Builtin = + | Date + | Function + | Uint8Array + | string + | number + | boolean + | undefined; + +export type DeepPartial = T extends Builtin + ? T + : T extends Array + ? Array> + : T extends ReadonlyArray + ? ReadonlyArray> + : T extends {} + ? { [K in keyof T]?: DeepPartial } : Partial; type KeysOfUnion = T extends T ? keyof T : never; -export type Exact = P extends Builtin ? 
P - : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; +export type Exact = P extends Builtin + ? P + : P & { [K in keyof P]: Exact } & { + [K in Exclude>]: never; + }; function longToNumber(long: Long): number { if (long.gt(Number.MAX_SAFE_INTEGER)) { - throw new tsProtoGlobalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + throw new tsProtoGlobalThis.Error( + 'Value is larger than Number.MAX_SAFE_INTEGER' + ); } return long.toNumber(); } diff --git a/ui/grpc_generated/route.ts b/ui/grpc_generated/route.ts index 3abcee4687..7dc0c5e9ad 100644 --- a/ui/grpc_generated/route.ts +++ b/ui/grpc_generated/route.ts @@ -10,12 +10,12 @@ import { Metadata, ServiceError, UntypedServiceImplementation, -} from "@grpc/grpc-js"; -import _m0 from "protobufjs/minimal"; -import { FlowConnectionConfigs, QRepConfig } from "./flow"; -import { Peer } from "./peers"; +} from '@grpc/grpc-js'; +import _m0 from 'protobufjs/minimal'; +import { FlowConnectionConfigs, QRepConfig } from './flow'; +import { Peer } from './peers'; -export const protobufPackage = "peerdb_route"; +export const protobufPackage = 'peerdb_route'; export interface CreatePeerFlowRequest { connectionConfigs: FlowConnectionConfigs | undefined; @@ -45,8 +45,7 @@ export interface ShutdownResponse { errorMessage: string; } -export interface ListPeersRequest { -} +export interface ListPeersRequest {} export interface ListPeersResponse { peers: Peer[]; @@ -57,15 +56,25 @@ function createBaseCreatePeerFlowRequest(): CreatePeerFlowRequest { } export const CreatePeerFlowRequest = { - encode(message: CreatePeerFlowRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: CreatePeerFlowRequest, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.connectionConfigs !== undefined) { - FlowConnectionConfigs.encode(message.connectionConfigs, writer.uint32(10).fork()).ldelim(); + FlowConnectionConfigs.encode( + message.connectionConfigs, + writer.uint32(10).fork() + ).ldelim(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): CreatePeerFlowRequest { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): CreatePeerFlowRequest { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseCreatePeerFlowRequest(); while (reader.pos < end) { @@ -76,7 +85,10 @@ export const CreatePeerFlowRequest = { break; } - message.connectionConfigs = FlowConnectionConfigs.decode(reader, reader.uint32()); + message.connectionConfigs = FlowConnectionConfigs.decode( + reader, + reader.uint32() + ); continue; } if ((tag & 7) === 4 || tag === 0) { @@ -98,37 +110,52 @@ export const CreatePeerFlowRequest = { toJSON(message: CreatePeerFlowRequest): unknown { const obj: any = {}; if (message.connectionConfigs !== undefined) { - obj.connectionConfigs = FlowConnectionConfigs.toJSON(message.connectionConfigs); + obj.connectionConfigs = FlowConnectionConfigs.toJSON( + message.connectionConfigs + ); } return obj; }, - create, I>>(base?: I): CreatePeerFlowRequest { + create, I>>( + base?: I + ): CreatePeerFlowRequest { return CreatePeerFlowRequest.fromPartial(base ?? 
({} as any)); }, - fromPartial, I>>(object: I): CreatePeerFlowRequest { + fromPartial, I>>( + object: I + ): CreatePeerFlowRequest { const message = createBaseCreatePeerFlowRequest(); - message.connectionConfigs = (object.connectionConfigs !== undefined && object.connectionConfigs !== null) - ? FlowConnectionConfigs.fromPartial(object.connectionConfigs) - : undefined; + message.connectionConfigs = + object.connectionConfigs !== undefined && + object.connectionConfigs !== null + ? FlowConnectionConfigs.fromPartial(object.connectionConfigs) + : undefined; return message; }, }; function createBaseCreatePeerFlowResponse(): CreatePeerFlowResponse { - return { worflowId: "" }; + return { worflowId: '' }; } export const CreatePeerFlowResponse = { - encode(message: CreatePeerFlowResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.worflowId !== "") { + encode( + message: CreatePeerFlowResponse, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { + if (message.worflowId !== '') { writer.uint32(10).string(message.worflowId); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): CreatePeerFlowResponse { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): CreatePeerFlowResponse { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseCreatePeerFlowResponse(); while (reader.pos < end) { @@ -151,23 +178,29 @@ export const CreatePeerFlowResponse = { }, fromJSON(object: any): CreatePeerFlowResponse { - return { worflowId: isSet(object.worflowId) ? String(object.worflowId) : "" }; + return { + worflowId: isSet(object.worflowId) ? String(object.worflowId) : '', + }; }, toJSON(message: CreatePeerFlowResponse): unknown { const obj: any = {}; - if (message.worflowId !== "") { + if (message.worflowId !== '') { obj.worflowId = message.worflowId; } return obj; }, - create, I>>(base?: I): CreatePeerFlowResponse { + create, I>>( + base?: I + ): CreatePeerFlowResponse { return CreatePeerFlowResponse.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): CreatePeerFlowResponse { + fromPartial, I>>( + object: I + ): CreatePeerFlowResponse { const message = createBaseCreatePeerFlowResponse(); - message.worflowId = object.worflowId ?? ""; + message.worflowId = object.worflowId ?? ''; return message; }, }; @@ -177,15 +210,22 @@ function createBaseCreateQRepFlowRequest(): CreateQRepFlowRequest { } export const CreateQRepFlowRequest = { - encode(message: CreateQRepFlowRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: CreateQRepFlowRequest, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.qrepConfig !== undefined) { QRepConfig.encode(message.qrepConfig, writer.uint32(10).fork()).ldelim(); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): CreateQRepFlowRequest { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): CreateQRepFlowRequest { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? 
reader.len : reader.pos + length; const message = createBaseCreateQRepFlowRequest(); while (reader.pos < end) { @@ -208,7 +248,11 @@ export const CreateQRepFlowRequest = { }, fromJSON(object: any): CreateQRepFlowRequest { - return { qrepConfig: isSet(object.qrepConfig) ? QRepConfig.fromJSON(object.qrepConfig) : undefined }; + return { + qrepConfig: isSet(object.qrepConfig) + ? QRepConfig.fromJSON(object.qrepConfig) + : undefined, + }; }, toJSON(message: CreateQRepFlowRequest): unknown { @@ -219,32 +263,44 @@ export const CreateQRepFlowRequest = { return obj; }, - create, I>>(base?: I): CreateQRepFlowRequest { + create, I>>( + base?: I + ): CreateQRepFlowRequest { return CreateQRepFlowRequest.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): CreateQRepFlowRequest { + fromPartial, I>>( + object: I + ): CreateQRepFlowRequest { const message = createBaseCreateQRepFlowRequest(); - message.qrepConfig = (object.qrepConfig !== undefined && object.qrepConfig !== null) - ? QRepConfig.fromPartial(object.qrepConfig) - : undefined; + message.qrepConfig = + object.qrepConfig !== undefined && object.qrepConfig !== null + ? QRepConfig.fromPartial(object.qrepConfig) + : undefined; return message; }, }; function createBaseCreateQRepFlowResponse(): CreateQRepFlowResponse { - return { worflowId: "" }; + return { worflowId: '' }; } export const CreateQRepFlowResponse = { - encode(message: CreateQRepFlowResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.worflowId !== "") { + encode( + message: CreateQRepFlowResponse, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { + if (message.worflowId !== '') { writer.uint32(10).string(message.worflowId); } return writer; }, - decode(input: _m0.Reader | Uint8Array, length?: number): CreateQRepFlowResponse { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + decode( + input: _m0.Reader | Uint8Array, + length?: number + ): CreateQRepFlowResponse { + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseCreateQRepFlowResponse(); while (reader.pos < end) { @@ -267,37 +323,51 @@ export const CreateQRepFlowResponse = { }, fromJSON(object: any): CreateQRepFlowResponse { - return { worflowId: isSet(object.worflowId) ? String(object.worflowId) : "" }; + return { + worflowId: isSet(object.worflowId) ? String(object.worflowId) : '', + }; }, toJSON(message: CreateQRepFlowResponse): unknown { const obj: any = {}; - if (message.worflowId !== "") { + if (message.worflowId !== '') { obj.worflowId = message.worflowId; } return obj; }, - create, I>>(base?: I): CreateQRepFlowResponse { + create, I>>( + base?: I + ): CreateQRepFlowResponse { return CreateQRepFlowResponse.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): CreateQRepFlowResponse { + fromPartial, I>>( + object: I + ): CreateQRepFlowResponse { const message = createBaseCreateQRepFlowResponse(); - message.worflowId = object.worflowId ?? ""; + message.worflowId = object.worflowId ?? 
''; return message; }, }; function createBaseShutdownRequest(): ShutdownRequest { - return { workflowId: "", flowJobName: "", sourcePeer: undefined, destinationPeer: undefined }; + return { + workflowId: '', + flowJobName: '', + sourcePeer: undefined, + destinationPeer: undefined, + }; } export const ShutdownRequest = { - encode(message: ShutdownRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.workflowId !== "") { + encode( + message: ShutdownRequest, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { + if (message.workflowId !== '') { writer.uint32(10).string(message.workflowId); } - if (message.flowJobName !== "") { + if (message.flowJobName !== '') { writer.uint32(18).string(message.flowJobName); } if (message.sourcePeer !== undefined) { @@ -310,7 +380,8 @@ export const ShutdownRequest = { }, decode(input: _m0.Reader | Uint8Array, length?: number): ShutdownRequest { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseShutdownRequest(); while (reader.pos < end) { @@ -355,19 +426,23 @@ export const ShutdownRequest = { fromJSON(object: any): ShutdownRequest { return { - workflowId: isSet(object.workflowId) ? String(object.workflowId) : "", - flowJobName: isSet(object.flowJobName) ? String(object.flowJobName) : "", - sourcePeer: isSet(object.sourcePeer) ? Peer.fromJSON(object.sourcePeer) : undefined, - destinationPeer: isSet(object.destinationPeer) ? Peer.fromJSON(object.destinationPeer) : undefined, + workflowId: isSet(object.workflowId) ? String(object.workflowId) : '', + flowJobName: isSet(object.flowJobName) ? String(object.flowJobName) : '', + sourcePeer: isSet(object.sourcePeer) + ? Peer.fromJSON(object.sourcePeer) + : undefined, + destinationPeer: isSet(object.destinationPeer) + ? Peer.fromJSON(object.destinationPeer) + : undefined, }; }, toJSON(message: ShutdownRequest): unknown { const obj: any = {}; - if (message.workflowId !== "") { + if (message.workflowId !== '') { obj.workflowId = message.workflowId; } - if (message.flowJobName !== "") { + if (message.flowJobName !== '') { obj.flowJobName = message.flowJobName; } if (message.sourcePeer !== undefined) { @@ -379,40 +454,50 @@ export const ShutdownRequest = { return obj; }, - create, I>>(base?: I): ShutdownRequest { + create, I>>( + base?: I + ): ShutdownRequest { return ShutdownRequest.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): ShutdownRequest { + fromPartial, I>>( + object: I + ): ShutdownRequest { const message = createBaseShutdownRequest(); - message.workflowId = object.workflowId ?? ""; - message.flowJobName = object.flowJobName ?? ""; - message.sourcePeer = (object.sourcePeer !== undefined && object.sourcePeer !== null) - ? Peer.fromPartial(object.sourcePeer) - : undefined; - message.destinationPeer = (object.destinationPeer !== undefined && object.destinationPeer !== null) - ? Peer.fromPartial(object.destinationPeer) - : undefined; + message.workflowId = object.workflowId ?? ''; + message.flowJobName = object.flowJobName ?? ''; + message.sourcePeer = + object.sourcePeer !== undefined && object.sourcePeer !== null + ? Peer.fromPartial(object.sourcePeer) + : undefined; + message.destinationPeer = + object.destinationPeer !== undefined && object.destinationPeer !== null + ? 
Peer.fromPartial(object.destinationPeer) + : undefined; return message; }, }; function createBaseShutdownResponse(): ShutdownResponse { - return { ok: false, errorMessage: "" }; + return { ok: false, errorMessage: '' }; } export const ShutdownResponse = { - encode(message: ShutdownResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: ShutdownResponse, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { if (message.ok === true) { writer.uint32(8).bool(message.ok); } - if (message.errorMessage !== "") { + if (message.errorMessage !== '') { writer.uint32(18).string(message.errorMessage); } return writer; }, decode(input: _m0.Reader | Uint8Array, length?: number): ShutdownResponse { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseShutdownResponse(); while (reader.pos < end) { @@ -444,7 +529,9 @@ export const ShutdownResponse = { fromJSON(object: any): ShutdownResponse { return { ok: isSet(object.ok) ? Boolean(object.ok) : false, - errorMessage: isSet(object.errorMessage) ? String(object.errorMessage) : "", + errorMessage: isSet(object.errorMessage) + ? String(object.errorMessage) + : '', }; }, @@ -453,19 +540,23 @@ export const ShutdownResponse = { if (message.ok === true) { obj.ok = message.ok; } - if (message.errorMessage !== "") { + if (message.errorMessage !== '') { obj.errorMessage = message.errorMessage; } return obj; }, - create, I>>(base?: I): ShutdownResponse { + create, I>>( + base?: I + ): ShutdownResponse { return ShutdownResponse.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): ShutdownResponse { + fromPartial, I>>( + object: I + ): ShutdownResponse { const message = createBaseShutdownResponse(); message.ok = object.ok ?? false; - message.errorMessage = object.errorMessage ?? ""; + message.errorMessage = object.errorMessage ?? ''; return message; }, }; @@ -475,12 +566,16 @@ function createBaseListPeersRequest(): ListPeersRequest { } export const ListPeersRequest = { - encode(_: ListPeersRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + _: ListPeersRequest, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { return writer; }, decode(input: _m0.Reader | Uint8Array, length?: number): ListPeersRequest { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseListPeersRequest(); while (reader.pos < end) { @@ -504,10 +599,14 @@ export const ListPeersRequest = { return obj; }, - create, I>>(base?: I): ListPeersRequest { + create, I>>( + base?: I + ): ListPeersRequest { return ListPeersRequest.fromPartial(base ?? 
({} as any)); }, - fromPartial, I>>(_: I): ListPeersRequest { + fromPartial, I>>( + _: I + ): ListPeersRequest { const message = createBaseListPeersRequest(); return message; }, @@ -518,7 +617,10 @@ function createBaseListPeersResponse(): ListPeersResponse { } export const ListPeersResponse = { - encode(message: ListPeersResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + encode( + message: ListPeersResponse, + writer: _m0.Writer = _m0.Writer.create() + ): _m0.Writer { for (const v of message.peers) { Peer.encode(v!, writer.uint32(10).fork()).ldelim(); } @@ -526,7 +628,8 @@ export const ListPeersResponse = { }, decode(input: _m0.Reader | Uint8Array, length?: number): ListPeersResponse { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + const reader = + input instanceof _m0.Reader ? input : _m0.Reader.create(input); let end = length === undefined ? reader.len : reader.pos + length; const message = createBaseListPeersResponse(); while (reader.pos < end) { @@ -549,7 +652,11 @@ export const ListPeersResponse = { }, fromJSON(object: any): ListPeersResponse { - return { peers: Array.isArray(object?.peers) ? object.peers.map((e: any) => Peer.fromJSON(e)) : [] }; + return { + peers: Array.isArray(object?.peers) + ? object.peers.map((e: any) => Peer.fromJSON(e)) + : [], + }; }, toJSON(message: ListPeersResponse): unknown { @@ -560,10 +667,14 @@ export const ListPeersResponse = { return obj; }, - create, I>>(base?: I): ListPeersResponse { + create, I>>( + base?: I + ): ListPeersResponse { return ListPeersResponse.fromPartial(base ?? ({} as any)); }, - fromPartial, I>>(object: I): ListPeersResponse { + fromPartial, I>>( + object: I + ): ListPeersResponse { const message = createBaseListPeersResponse(); message.peers = object.peers?.map((e) => Peer.fromPartial(e)) || []; return message; @@ -573,131 +684,184 @@ export const ListPeersResponse = { export type FlowServiceService = typeof FlowServiceService; export const FlowServiceService = { listPeers: { - path: "/peerdb_route.FlowService/ListPeers", + path: '/peerdb_route.FlowService/ListPeers', requestStream: false, responseStream: false, - requestSerialize: (value: ListPeersRequest) => Buffer.from(ListPeersRequest.encode(value).finish()), + requestSerialize: (value: ListPeersRequest) => + Buffer.from(ListPeersRequest.encode(value).finish()), requestDeserialize: (value: Buffer) => ListPeersRequest.decode(value), - responseSerialize: (value: ListPeersResponse) => Buffer.from(ListPeersResponse.encode(value).finish()), + responseSerialize: (value: ListPeersResponse) => + Buffer.from(ListPeersResponse.encode(value).finish()), responseDeserialize: (value: Buffer) => ListPeersResponse.decode(value), }, createPeerFlow: { - path: "/peerdb_route.FlowService/CreatePeerFlow", + path: '/peerdb_route.FlowService/CreatePeerFlow', requestStream: false, responseStream: false, - requestSerialize: (value: CreatePeerFlowRequest) => Buffer.from(CreatePeerFlowRequest.encode(value).finish()), + requestSerialize: (value: CreatePeerFlowRequest) => + Buffer.from(CreatePeerFlowRequest.encode(value).finish()), requestDeserialize: (value: Buffer) => CreatePeerFlowRequest.decode(value), - responseSerialize: (value: CreatePeerFlowResponse) => Buffer.from(CreatePeerFlowResponse.encode(value).finish()), - responseDeserialize: (value: Buffer) => CreatePeerFlowResponse.decode(value), + responseSerialize: (value: CreatePeerFlowResponse) => + Buffer.from(CreatePeerFlowResponse.encode(value).finish()), + responseDeserialize: (value: Buffer) 
=> + CreatePeerFlowResponse.decode(value), }, createQRepFlow: { - path: "/peerdb_route.FlowService/CreateQRepFlow", + path: '/peerdb_route.FlowService/CreateQRepFlow', requestStream: false, responseStream: false, - requestSerialize: (value: CreateQRepFlowRequest) => Buffer.from(CreateQRepFlowRequest.encode(value).finish()), + requestSerialize: (value: CreateQRepFlowRequest) => + Buffer.from(CreateQRepFlowRequest.encode(value).finish()), requestDeserialize: (value: Buffer) => CreateQRepFlowRequest.decode(value), - responseSerialize: (value: CreateQRepFlowResponse) => Buffer.from(CreateQRepFlowResponse.encode(value).finish()), - responseDeserialize: (value: Buffer) => CreateQRepFlowResponse.decode(value), + responseSerialize: (value: CreateQRepFlowResponse) => + Buffer.from(CreateQRepFlowResponse.encode(value).finish()), + responseDeserialize: (value: Buffer) => + CreateQRepFlowResponse.decode(value), }, shutdownFlow: { - path: "/peerdb_route.FlowService/ShutdownFlow", + path: '/peerdb_route.FlowService/ShutdownFlow', requestStream: false, responseStream: false, - requestSerialize: (value: ShutdownRequest) => Buffer.from(ShutdownRequest.encode(value).finish()), + requestSerialize: (value: ShutdownRequest) => + Buffer.from(ShutdownRequest.encode(value).finish()), requestDeserialize: (value: Buffer) => ShutdownRequest.decode(value), - responseSerialize: (value: ShutdownResponse) => Buffer.from(ShutdownResponse.encode(value).finish()), + responseSerialize: (value: ShutdownResponse) => + Buffer.from(ShutdownResponse.encode(value).finish()), responseDeserialize: (value: Buffer) => ShutdownResponse.decode(value), }, } as const; export interface FlowServiceServer extends UntypedServiceImplementation { listPeers: handleUnaryCall; - createPeerFlow: handleUnaryCall; - createQRepFlow: handleUnaryCall; + createPeerFlow: handleUnaryCall< + CreatePeerFlowRequest, + CreatePeerFlowResponse + >; + createQRepFlow: handleUnaryCall< + CreateQRepFlowRequest, + CreateQRepFlowResponse + >; shutdownFlow: handleUnaryCall; } export interface FlowServiceClient extends Client { listPeers( request: ListPeersRequest, - callback: (error: ServiceError | null, response: ListPeersResponse) => void, + callback: (error: ServiceError | null, response: ListPeersResponse) => void ): ClientUnaryCall; listPeers( request: ListPeersRequest, metadata: Metadata, - callback: (error: ServiceError | null, response: ListPeersResponse) => void, + callback: (error: ServiceError | null, response: ListPeersResponse) => void ): ClientUnaryCall; listPeers( request: ListPeersRequest, metadata: Metadata, options: Partial, - callback: (error: ServiceError | null, response: ListPeersResponse) => void, + callback: (error: ServiceError | null, response: ListPeersResponse) => void ): ClientUnaryCall; createPeerFlow( request: CreatePeerFlowRequest, - callback: (error: ServiceError | null, response: CreatePeerFlowResponse) => void, + callback: ( + error: ServiceError | null, + response: CreatePeerFlowResponse + ) => void ): ClientUnaryCall; createPeerFlow( request: CreatePeerFlowRequest, metadata: Metadata, - callback: (error: ServiceError | null, response: CreatePeerFlowResponse) => void, + callback: ( + error: ServiceError | null, + response: CreatePeerFlowResponse + ) => void ): ClientUnaryCall; createPeerFlow( request: CreatePeerFlowRequest, metadata: Metadata, options: Partial, - callback: (error: ServiceError | null, response: CreatePeerFlowResponse) => void, + callback: ( + error: ServiceError | null, + response: CreatePeerFlowResponse + ) => void 
): ClientUnaryCall; createQRepFlow( request: CreateQRepFlowRequest, - callback: (error: ServiceError | null, response: CreateQRepFlowResponse) => void, + callback: ( + error: ServiceError | null, + response: CreateQRepFlowResponse + ) => void ): ClientUnaryCall; createQRepFlow( request: CreateQRepFlowRequest, metadata: Metadata, - callback: (error: ServiceError | null, response: CreateQRepFlowResponse) => void, + callback: ( + error: ServiceError | null, + response: CreateQRepFlowResponse + ) => void ): ClientUnaryCall; createQRepFlow( request: CreateQRepFlowRequest, metadata: Metadata, options: Partial, - callback: (error: ServiceError | null, response: CreateQRepFlowResponse) => void, + callback: ( + error: ServiceError | null, + response: CreateQRepFlowResponse + ) => void ): ClientUnaryCall; shutdownFlow( request: ShutdownRequest, - callback: (error: ServiceError | null, response: ShutdownResponse) => void, + callback: (error: ServiceError | null, response: ShutdownResponse) => void ): ClientUnaryCall; shutdownFlow( request: ShutdownRequest, metadata: Metadata, - callback: (error: ServiceError | null, response: ShutdownResponse) => void, + callback: (error: ServiceError | null, response: ShutdownResponse) => void ): ClientUnaryCall; shutdownFlow( request: ShutdownRequest, metadata: Metadata, options: Partial, - callback: (error: ServiceError | null, response: ShutdownResponse) => void, + callback: (error: ServiceError | null, response: ShutdownResponse) => void ): ClientUnaryCall; } export const FlowServiceClient = makeGenericClientConstructor( FlowServiceService, - "peerdb_route.FlowService", + 'peerdb_route.FlowService' ) as unknown as { - new (address: string, credentials: ChannelCredentials, options?: Partial): FlowServiceClient; + new ( + address: string, + credentials: ChannelCredentials, + options?: Partial + ): FlowServiceClient; service: typeof FlowServiceService; }; -type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; - -export type DeepPartial = T extends Builtin ? T - : T extends Array ? Array> : T extends ReadonlyArray ? ReadonlyArray> - : T extends {} ? { [K in keyof T]?: DeepPartial } +type Builtin = + | Date + | Function + | Uint8Array + | string + | number + | boolean + | undefined; + +export type DeepPartial = T extends Builtin + ? T + : T extends Array + ? Array> + : T extends ReadonlyArray + ? ReadonlyArray> + : T extends {} + ? { [K in keyof T]?: DeepPartial } : Partial; type KeysOfUnion = T extends T ? keyof T : never; -export type Exact = P extends Builtin ? P - : P & { [K in keyof P]: Exact } & { [K in Exclude>]: never }; +export type Exact = P extends Builtin + ? P + : P & { [K in keyof P]: Exact } & { + [K in Exclude>]: never; + }; function isSet(value: any): boolean { return value !== null && value !== undefined;