From eaf864ccdfe297ce856e7d90e72cd3716a561129 Mon Sep 17 00:00:00 2001 From: Amogh Bharadwaj Date: Tue, 28 Nov 2023 22:17:21 +0530 Subject: [PATCH 01/12] Use pg12 for temporal (#730) This will enable advanced visibility and support other filters and search attributes --- docker-compose-dev.yml | 2 +- docker-compose.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index af7fed1fd6..c34083c819 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -56,7 +56,7 @@ services: catalog: condition: service_healthy environment: - - DB=postgresql + - DB=postgres12 - DB_PORT=5432 - POSTGRES_USER=postgres - POSTGRES_PWD=postgres diff --git a/docker-compose.yml b/docker-compose.yml index a4f2205cec..4b445e29c3 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -49,7 +49,7 @@ services: catalog: condition: service_healthy environment: - - DB=postgresql + - DB=postgres12 - DB_PORT=5432 - POSTGRES_USER=postgres - POSTGRES_PWD=postgres From a4c028c993070edcc132aa8e621d321d9264bb7a Mon Sep 17 00:00:00 2001 From: Amogh Bharadwaj Date: Wed, 29 Nov 2023 01:02:12 +0530 Subject: [PATCH 02/12] Support for mirror name filter (#731) Fixes #696 We can now filter workflows by mirror name in Temporal UI Screenshot 2023-11-28 at 11 42 12 PM --- docker-compose-dev.yml | 3 +++ docker-compose.yml | 3 +++ flow/cmd/handler.go | 9 +++++++++ flow/shared/constants.go | 2 ++ flow/workflows/cdc_flow.go | 13 ++++++++++++- flow/workflows/qrep_flow.go | 3 +++ scripts/mirror-name-search.sh | 7 +++++++ 7 files changed, 39 insertions(+), 1 deletion(-) create mode 100644 scripts/mirror-name-search.sh diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index c34083c819..6dbb170740 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -86,11 +86,14 @@ services: image: temporalio/admin-tools:1.22 stdin_open: true tty: true + entrypoint: ["bash", "/etc/temporal/entrypoint.sh"] healthcheck: test: ["CMD", "tctl", "workflow", "list"] interval: 1s timeout: 5s retries: 30 + volumes: + - ./scripts/mirror-name-search.sh:/etc/temporal/entrypoint.sh temporal-ui: container_name: temporal-ui diff --git a/docker-compose.yml b/docker-compose.yml index 4b445e29c3..440f06545d 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -73,11 +73,14 @@ services: image: temporalio/admin-tools:1.22 stdin_open: true tty: true + entrypoint: ["bash", "/etc/temporal/entrypoint.sh"] healthcheck: test: ["CMD", "tctl", "workflow", "list"] interval: 1s timeout: 5s retries: 30 + volumes: + - ./scripts/mirror-name-search.sh:/etc/temporal/entrypoint.sh temporal-ui: container_name: temporal-ui diff --git a/flow/cmd/handler.go b/flow/cmd/handler.go index 8cd4951787..128bde255c 100644 --- a/flow/cmd/handler.go +++ b/flow/cmd/handler.go @@ -129,6 +129,9 @@ func (h *FlowRequestHandler) CreateCDCFlow( workflowOptions := client.StartWorkflowOptions{ ID: workflowID, TaskQueue: h.peerflowTaskQueueID, + SearchAttributes: map[string]interface{}{ + shared.MirrorNameSearchAttribute: cfg.FlowJobName, + }, } maxBatchSize := int(cfg.MaxBatchSize) @@ -229,6 +232,9 @@ func (h *FlowRequestHandler) CreateQRepFlow( workflowOptions := client.StartWorkflowOptions{ ID: workflowID, TaskQueue: h.peerflowTaskQueueID, + SearchAttributes: map[string]interface{}{ + shared.MirrorNameSearchAttribute: cfg.FlowJobName, + }, } if req.CreateCatalogEntry { err := h.createQrepJobEntry(ctx, req, workflowID) @@ -311,6 +317,9 @@ func (h *FlowRequestHandler) ShutdownFlow( workflowOptions := 
client.StartWorkflowOptions{ ID: workflowID, TaskQueue: h.peerflowTaskQueueID, + SearchAttributes: map[string]interface{}{ + shared.MirrorNameSearchAttribute: req.FlowJobName, + }, } dropFlowHandle, err := h.temporalClient.ExecuteWorkflow( ctx, // context diff --git a/flow/shared/constants.go b/flow/shared/constants.go index d1dfbdd6e1..48a6f8ae2f 100644 --- a/flow/shared/constants.go +++ b/flow/shared/constants.go @@ -11,6 +11,8 @@ const ( CDCFlowSignalName = "peer-flow-signal" ) +const MirrorNameSearchAttribute = "MirrorName" + type CDCFlowSignal int64 type ContextKey string diff --git a/flow/workflows/cdc_flow.go b/flow/workflows/cdc_flow.go index bbf134bf09..2dfe99ff25 100644 --- a/flow/workflows/cdc_flow.go +++ b/flow/workflows/cdc_flow.go @@ -189,6 +189,10 @@ func CDCFlowWorkflowWithConfig( } } + mirrorNameSearch := map[string]interface{}{ + shared.MirrorNameSearchAttribute: cfg.FlowJobName, + } + // start the SetupFlow workflow as a child workflow, and wait for it to complete // it should return the table schema for the source peer setupFlowID, err := GetChildWorkflowID(ctx, "setup-flow", cfg.FlowJobName) @@ -201,6 +205,7 @@ func CDCFlowWorkflowWithConfig( RetryPolicy: &temporal.RetryPolicy{ MaximumAttempts: 20, }, + SearchAttributes: mirrorNameSearch, } setupFlowCtx := workflow.WithChildOptions(ctx, childSetupFlowOpts) setupFlowFuture := workflow.ExecuteChildWorkflow(setupFlowCtx, SetupFlowWorkflow, cfg) @@ -226,7 +231,8 @@ func CDCFlowWorkflowWithConfig( RetryPolicy: &temporal.RetryPolicy{ MaximumAttempts: 20, }, - TaskQueue: taskQueue, + TaskQueue: taskQueue, + SearchAttributes: mirrorNameSearch, } snapshotFlowCtx := workflow.WithChildOptions(ctx, childSnapshotFlowOpts) snapshotFlowFuture := workflow.ExecuteChildWorkflow(snapshotFlowCtx, SnapshotFlowWorkflow, cfg) @@ -323,6 +329,9 @@ func CDCFlowWorkflowWithConfig( return state, err } + mirrorNameSearch := map[string]interface{}{ + shared.MirrorNameSearchAttribute: cfg.FlowJobName, + } // execute the sync flow as a child workflow childSyncFlowOpts := workflow.ChildWorkflowOptions{ WorkflowID: syncFlowID, @@ -330,6 +339,7 @@ func CDCFlowWorkflowWithConfig( RetryPolicy: &temporal.RetryPolicy{ MaximumAttempts: 20, }, + SearchAttributes: mirrorNameSearch, } ctx = workflow.WithChildOptions(ctx, childSyncFlowOpts) syncFlowOptions.RelationMessageMapping = *state.RelationMessageMapping @@ -362,6 +372,7 @@ func CDCFlowWorkflowWithConfig( RetryPolicy: &temporal.RetryPolicy{ MaximumAttempts: 20, }, + SearchAttributes: mirrorNameSearch, } ctx = workflow.WithChildOptions(ctx, childNormalizeFlowOpts) diff --git a/flow/workflows/qrep_flow.go b/flow/workflows/qrep_flow.go index 3b8e77a686..d56404af4a 100644 --- a/flow/workflows/qrep_flow.go +++ b/flow/workflows/qrep_flow.go @@ -206,6 +206,9 @@ func (q *QRepFlowExecution) startChildWorkflow( RetryPolicy: &temporal.RetryPolicy{ MaximumAttempts: 20, }, + SearchAttributes: map[string]interface{}{ + shared.MirrorNameSearchAttribute: q.config.FlowJobName, + }, }) future := workflow.ExecuteChildWorkflow( diff --git a/scripts/mirror-name-search.sh b/scripts/mirror-name-search.sh new file mode 100644 index 0000000000..c4f738a67a --- /dev/null +++ b/scripts/mirror-name-search.sh @@ -0,0 +1,7 @@ +# Check if MirrorName attribute exists +if ! 
temporal operator search-attribute list | grep -w MirrorName >/dev/null 2>&1; then + # If not, create MirrorName attribute + temporal operator search-attribute create --name MirrorName --type Text +fi + +tini -- sleep infinity From 2752413f297ef63a76e8d0b067694ab5f5c56a7d Mon Sep 17 00:00:00 2001 From: Kaushik Iska Date: Tue, 28 Nov 2023 14:47:57 -0500 Subject: [PATCH 03/12] quote the columns for exclusion to preserve case sensitivity (#733) --- flow/workflows/snapshot_flow.go | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/flow/workflows/snapshot_flow.go b/flow/workflows/snapshot_flow.go index 1c08a40fac..ae12ecc7a4 100644 --- a/flow/workflows/snapshot_flow.go +++ b/flow/workflows/snapshot_flow.go @@ -145,7 +145,11 @@ func (s *SnapshotFlowExecution) cloneTable( if len(mapping.Exclude) != 0 { for _, v := range s.config.TableNameSchemaMapping { if v.TableIdentifier == srcName { - from = strings.Join(maps.Keys(v.Columns), ",") + cols := maps.Keys(v.Columns) + for i, col := range cols { + cols[i] = fmt.Sprintf(`"%s"`, col) + } + from = strings.Join(cols, ",") break } } From ed514f4eeff0285d037a6e6283b1f29b45bbfb3f Mon Sep 17 00:00:00 2001 From: Amogh Bharadwaj Date: Wed, 29 Nov 2023 02:01:37 +0530 Subject: [PATCH 04/12] UI: Better postgres peer page (#732) - Displays connection string for postgres peer - Search bar for query in stats table --------- Co-authored-by: Kaushik Iska --- ui/app/peers/[peerName]/datatables.tsx | 170 ------------------------- ui/app/peers/[peerName]/page.tsx | 31 ++++- ui/app/peers/[peerName]/slottable.tsx | 75 +++++++++++ ui/app/peers/[peerName]/stattable.tsx | 110 ++++++++++++++++ ui/app/peers/[peerName]/style.ts | 22 ++++ ui/components/CopyButton.tsx | 5 +- 6 files changed, 241 insertions(+), 172 deletions(-) delete mode 100644 ui/app/peers/[peerName]/datatables.tsx create mode 100644 ui/app/peers/[peerName]/slottable.tsx create mode 100644 ui/app/peers/[peerName]/stattable.tsx create mode 100644 ui/app/peers/[peerName]/style.ts diff --git a/ui/app/peers/[peerName]/datatables.tsx b/ui/app/peers/[peerName]/datatables.tsx deleted file mode 100644 index f86f7c5536..0000000000 --- a/ui/app/peers/[peerName]/datatables.tsx +++ /dev/null @@ -1,170 +0,0 @@ -import { CopyButton } from '@/components/CopyButton'; -import TimeLabel from '@/components/TimeComponent'; -import { SlotInfo, StatInfo } from '@/grpc_generated/route'; -import { Label } from '@/lib/Label'; -import { Table, TableCell, TableRow } from '@/lib/Table'; -import { DurationDisplay, SlotNameDisplay } from './helpers'; - -export const SlotTable = ({ data }: { data: SlotInfo[] }) => { - return ( -
- -
- - {[ - 'Slot Name', - 'Active', - 'Redo LSN', - 'Restart LSN', - 'Lag (In MB)', - ].map((heading, index) => ( - - - - ))} - - } - > - {data.map(({ slotName, active, redoLSN, restartLSN, lagInMb }) => { - return ( - - - - - - - - - - - - - - - - - - ); - })} -
-
-
- ); -}; - -export const StatTable = ({ data }: { data: StatInfo[] }) => { - return ( -
- -
- - {[ - 'PID', - 'Duration', - 'Wait Event', - 'Wait Event Type', - 'Start Time', - 'Query', - ].map((heading, id) => ( - - - - ))} - - } - > - {data.map((stat) => ( - - - - - - - - - - - - - - - - - -
- {stat.query} - -
-
-
- ))} -
-
-
- ); -}; diff --git a/ui/app/peers/[peerName]/page.tsx b/ui/app/peers/[peerName]/page.tsx index 3a6eae42a7..4e88c5c3ee 100644 --- a/ui/app/peers/[peerName]/page.tsx +++ b/ui/app/peers/[peerName]/page.tsx @@ -1,15 +1,36 @@ +import { CopyButton } from '@/components/CopyButton'; import ReloadButton from '@/components/ReloadButton'; +import { PostgresConfig } from '@/grpc_generated/peers'; import { PeerSlotResponse, PeerStatResponse } from '@/grpc_generated/route'; import { Label } from '@/lib/Label'; import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; import Link from 'next/link'; -import { SlotTable, StatTable } from './datatables'; +import prisma from '../../utils/prisma'; +import SlotTable from './slottable'; +import StatTable from './stattable'; +import { connStringStyle } from './style'; export const dynamic = 'force-dynamic'; type DataConfigProps = { params: { peerName: string }; }; +async function fetchConnectionString(peerName: string) { + const config = await prisma.peers.findUnique({ + select: { + options: true, + }, + where: { + name: peerName, + }, + }); + if (config) { + const pgConfig = PostgresConfig.decode(config.options); + return `postgresql://${pgConfig.user}:${pgConfig.password}@${pgConfig.host}:${pgConfig.port}`; + } + return ''; +} + const PeerData = async ({ params: { peerName } }: DataConfigProps) => { const getSlotData = async () => { const flowServiceAddr = GetFlowHttpAddressFromEnv(); @@ -50,6 +71,7 @@ const PeerData = async ({ params: { peerName } }: DataConfigProps) => { const slots = await getSlotData(); const stats = await getStatData(); + const connectionString = await fetchConnectionString(peerName); return (
@@ -57,6 +79,13 @@ const PeerData = async ({ params: { peerName } }: DataConfigProps) => {
{peerName}
+
+ +
+ {connectionString} + +
+
{slots && stats ? (
{ + return ( +
+ +
+ + {[ + 'Slot Name', + 'Active', + 'Redo LSN', + 'Restart LSN', + 'Lag (In MB)', + ].map((heading, index) => ( + + + + ))} + + } + > + {data.map(({ slotName, active, redoLSN, restartLSN, lagInMb }) => { + return ( + + + + + + + + + + + + + + + + + + ); + })} +
+
+
+ ); +}; + +export default SlotTable; diff --git a/ui/app/peers/[peerName]/stattable.tsx b/ui/app/peers/[peerName]/stattable.tsx new file mode 100644 index 0000000000..a3a3e80fe3 --- /dev/null +++ b/ui/app/peers/[peerName]/stattable.tsx @@ -0,0 +1,110 @@ +'use client'; +import { CopyButton } from '@/components/CopyButton'; +import TimeLabel from '@/components/TimeComponent'; +import { StatInfo } from '@/grpc_generated/route'; +import { Label } from '@/lib/Label'; +import { SearchField } from '@/lib/SearchField'; +import { Table, TableCell, TableRow } from '@/lib/Table'; +import { useMemo, useState } from 'react'; +import { DurationDisplay } from './helpers'; +import { tableStyle } from './style'; + +const StatTable = ({ data }: { data: StatInfo[] }) => { + const [search, setSearch] = useState(''); + const filteredData = useMemo(() => { + return data.filter((stat) => { + return stat.query.toLowerCase().includes(search.toLowerCase()); + }); + }, [data, search]); + + return ( +
+ +
+ + {[ + 'PID', + 'Duration', + 'Wait Event', + 'Wait Event Type', + 'Start Time', + 'Query', + ].map((heading, id) => ( + + + + ))} + + } + toolbar={{ + left: <>, + right: ( + ) => + setSearch(e.target.value) + } + /> + ), + }} + > + {filteredData.map((stat) => ( + + + + + + + + + + + + + + + + + +
+ {stat.query} + +
+
+
+ ))} +
+
+
+ ); +}; + +export default StatTable; diff --git a/ui/app/peers/[peerName]/style.ts b/ui/app/peers/[peerName]/style.ts new file mode 100644 index 0000000000..1afdbb86b0 --- /dev/null +++ b/ui/app/peers/[peerName]/style.ts @@ -0,0 +1,22 @@ +import { CSSProperties } from 'styled-components'; + +export const tableStyle = { + maxHeight: '100%', + overflow: 'scroll', + padding: '0.5rem', + borderRadius: '0.5rem', + border: '1px solid rgba(0,0,0,0.1)', +}; + +export const connStringStyle: CSSProperties = { + backgroundColor: 'white', + display: 'flex', + width: 'fit-content', + alignItems: 'center', + padding: '0.5rem', + border: '1px solid rgba(0,0,0,0.1)', + borderRadius: '0.5rem', + marginTop: '0.5rem', + fontFamily: 'monospace', + whiteSpace: 'pre-wrap', +}; diff --git a/ui/components/CopyButton.tsx b/ui/components/CopyButton.tsx index bb1334564f..860dd574a2 100644 --- a/ui/components/CopyButton.tsx +++ b/ui/components/CopyButton.tsx @@ -13,7 +13,10 @@ export const CopyButton = ({ text }: { text: string }) => { From fdd497960a4aae259735e3ffdb67282f13aee7bc Mon Sep 17 00:00:00 2001 From: Kaushik Iska Date: Tue, 28 Nov 2023 15:45:59 -0500 Subject: [PATCH 05/12] Prepend PEERDB_DEPLOYMENT_UID only if its set (#735) --- flow/connectors/utils/env.go | 11 +++++++++++ flow/shared/constants.go | 16 ++++++++++++---- 2 files changed, 23 insertions(+), 4 deletions(-) diff --git a/flow/connectors/utils/env.go b/flow/connectors/utils/env.go index 2911e3d8ef..6d3065dae4 100644 --- a/flow/connectors/utils/env.go +++ b/flow/connectors/utils/env.go @@ -45,3 +45,14 @@ func GetEnvInt(name string, defaultValue int) int { return i } + +// GetEnvString returns the value of the environment variable with the given name +// or defaultValue if the environment variable is not set. +func GetEnvString(name string, defaultValue string) string { + val, ok := GetEnv(name) + if !ok { + return defaultValue + } + + return val +} diff --git a/flow/shared/constants.go b/flow/shared/constants.go index 48a6f8ae2f..a10d529189 100644 --- a/flow/shared/constants.go +++ b/flow/shared/constants.go @@ -2,7 +2,8 @@ package shared import ( "fmt" - "os" + + "github.com/PeerDB-io/peer-flow/connectors/utils" ) const ( @@ -34,13 +35,20 @@ const ( const FetchAndChannelSize = 256 * 1024 func GetPeerFlowTaskQueueName(taskQueueID TaskQueueID) (string, error) { - deploymentUID := os.Getenv("PEERDB_DEPLOYMENT_UID") switch taskQueueID { case PeerFlowTaskQueueID: - return deploymentUID + "-" + peerFlowTaskQueue, nil + return prependUIDToTaskQueueName(peerFlowTaskQueue), nil case SnapshotFlowTaskQueueID: - return deploymentUID + "-" + snapshotFlowTaskQueue, nil + return prependUIDToTaskQueueName(snapshotFlowTaskQueue), nil default: return "", fmt.Errorf("unknown task queue id %d", taskQueueID) } } + +func prependUIDToTaskQueueName(taskQueueName string) string { + deploymentUID := utils.GetEnvString("PEERDB_DEPLOYMENT_UID", "") + if deploymentUID == "" { + return taskQueueName + } + return fmt.Sprintf("%s-%s", deploymentUID, taskQueueName) +} From f5155301bb6124522f59e40f9857384f33cd78ca Mon Sep 17 00:00:00 2001 From: Amogh Bharadwaj Date: Wed, 29 Nov 2023 14:58:32 +0530 Subject: [PATCH 06/12] More reliable mirror name search attribute script (#737) Noticed that temporal was not ready yet and so temporal-admin-tools could not list attributes Now sleeping for 5 seconds. 
Tested it and now the above issue wasn't reproducible by me Also adding `-s` to tini command as suggested by warnings --- scripts/mirror-name-search.sh | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/scripts/mirror-name-search.sh b/scripts/mirror-name-search.sh index c4f738a67a..da50963d72 100644 --- a/scripts/mirror-name-search.sh +++ b/scripts/mirror-name-search.sh @@ -1,7 +1,9 @@ +sleep 5 + # Check if MirrorName attribute exists if ! temporal operator search-attribute list | grep -w MirrorName >/dev/null 2>&1; then # If not, create MirrorName attribute - temporal operator search-attribute create --name MirrorName --type Text + temporal operator search-attribute create --name MirrorName --type Text --namespace default fi -tini -- sleep infinity +tini -s -- sleep infinity From b0076495d4ed0835faefe8a653da73859df49a80 Mon Sep 17 00:00:00 2001 From: Amogh Bharadwaj Date: Thu, 30 Nov 2023 01:01:01 +0530 Subject: [PATCH 07/12] Use number of records synced as decider for CDC tests (#738) --- flow/cmd/handler.go | 1 + flow/connectors/eventhub/eventhub.go | 19 ++++--- flow/connectors/postgres/cdc.go | 73 ++++++++++++++++++++----- flow/e2e/bigquery/peer_flow_bq_test.go | 60 ++++++++++---------- flow/e2e/postgres/peer_flow_pg_test.go | 20 +++---- flow/e2e/s3/cdc_s3_test.go | 15 +++-- flow/e2e/snowflake/peer_flow_sf_test.go | 69 +++++++++++------------ flow/workflows/cdc_flow.go | 12 ++++ flow/workflows/sync_flow.go | 2 +- 9 files changed, 164 insertions(+), 107 deletions(-) diff --git a/flow/cmd/handler.go b/flow/cmd/handler.go index 128bde255c..44fb5f01a6 100644 --- a/flow/cmd/handler.go +++ b/flow/cmd/handler.go @@ -142,6 +142,7 @@ func (h *FlowRequestHandler) CreateCDCFlow( limits := &peerflow.CDCFlowLimits{ TotalSyncFlows: 0, + ExitAfterRecords: -1, TotalNormalizeFlows: 0, MaxBatchSize: maxBatchSize, } diff --git a/flow/connectors/eventhub/eventhub.go b/flow/connectors/eventhub/eventhub.go index 6c0f6b4ac9..73c8ed528c 100644 --- a/flow/connectors/eventhub/eventhub.go +++ b/flow/connectors/eventhub/eventhub.go @@ -211,15 +211,6 @@ func (c *EventHubConnector) processBatch( } func (c *EventHubConnector) SyncRecords(req *model.SyncRecordsRequest) (*model.SyncResponse, error) { - shutdown := utils.HeartbeatRoutine(c.ctx, 10*time.Second, func() string { - return fmt.Sprintf("syncing records to eventhub with"+ - " push parallelism %d and push batch size %d", - req.PushParallelism, req.PushBatchSize) - }) - defer func() { - shutdown <- true - }() - maxParallelism := req.PushParallelism if maxParallelism <= 0 { maxParallelism = 10 @@ -229,6 +220,16 @@ func (c *EventHubConnector) SyncRecords(req *model.SyncRecordsRequest) (*model.S batch := req.Records var numRecords uint32 + shutdown := utils.HeartbeatRoutine(c.ctx, 10*time.Second, func() string { + return fmt.Sprintf( + "processed %d records for flow %s", + numRecords, req.FlowJobName, + ) + }) + defer func() { + shutdown <- true + }() + // if env var PEERDB_BETA_EVENTHUB_PUSH_ASYNC=true // we kick off processBatch in a goroutine and return immediately. // otherwise, we block until processBatch is done. 
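The eventhub.go hunk above leans on utils.HeartbeatRoutine(ctx, interval, messageFunc), which returns a channel the caller signals with `shutdown <- true` once the sync finishes. A minimal sketch of that pattern, assuming the helper simply wraps Temporal's activity.RecordHeartbeat on a ticker — the repository's actual implementation may differ:

package utils

import (
	"context"
	"time"

	"go.temporal.io/sdk/activity"
)

// HeartbeatRoutine records an activity heartbeat with the message returned by msg
// every interval, until the caller signals the returned channel or ctx is cancelled.
func HeartbeatRoutine(ctx context.Context, interval time.Duration, msg func() string) chan bool {
	shutdown := make(chan bool, 1) // buffered so a deferred `shutdown <- true` never blocks
	go func() {
		ticker := time.NewTicker(interval)
		defer ticker.Stop()
		for {
			select {
			case <-shutdown:
				return
			case <-ctx.Done():
				return
			case <-ticker.C:
				// report progress so the activity stays within its HeartbeatTimeout
				activity.RecordHeartbeat(ctx, msg())
			}
		}
	}()
	return shutdown
}

With sync_flow.go in this patch tightening HeartbeatTimeout from 5 minutes to 30 seconds, a heartbeat cadence like the 10-second interval used above is what keeps long-running syncs from being timed out by the Temporal server.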
diff --git a/flow/connectors/postgres/cdc.go b/flow/connectors/postgres/cdc.go index 552f372a04..cfb0425515 100644 --- a/flow/connectors/postgres/cdc.go +++ b/flow/connectors/postgres/cdc.go @@ -164,9 +164,6 @@ func (p *PostgresCDCSource) consumeStream( clientXLogPos pglogrepl.LSN, records *model.CDCRecordStream, ) error { - standbyMessageTimeout := req.IdleTimeout - nextStandbyMessageDeadline := time.Now().Add(standbyMessageTimeout) - defer func() { err := conn.Close(p.ctx) if err != nil { @@ -211,6 +208,8 @@ func (p *PostgresCDCSource) consumeStream( }() tablePKeyLastSeen := make(map[model.TableWithPkey]int) + standbyMessageTimeout := req.IdleTimeout + nextStandbyMessageDeadline := time.Now().Add(standbyMessageTimeout) addRecord := func(rec model.Record) { records.AddRecord(rec) @@ -218,12 +217,17 @@ func (p *PostgresCDCSource) consumeStream( if len(localRecords) == 1 { records.SignalAsNotEmpty() + log.Infof("pushing the standby deadline to %s", time.Now().Add(standbyMessageTimeout)) + log.Infof("num records accumulated: %d", len(localRecords)) + nextStandbyMessageDeadline = time.Now().Add(standbyMessageTimeout) } } + pkmRequiresResponse := false + waitingForCommit := false + for { - if time.Now().After(nextStandbyMessageDeadline) || - (len(localRecords) >= int(req.MaxBatchSize)) { + if pkmRequiresResponse { // Update XLogPos to the last processed position, we can only confirm // that this is the last row committed on the destination. err := pglogrepl.SendStandbyStatusUpdate(p.ctx, conn, @@ -232,26 +236,64 @@ func (p *PostgresCDCSource) consumeStream( return fmt.Errorf("SendStandbyStatusUpdate failed: %w", err) } - numRowsProcessedMessage := fmt.Sprintf("processed %d rows", len(localRecords)) - if time.Since(standByLastLogged) > 10*time.Second { + numRowsProcessedMessage := fmt.Sprintf("processed %d rows", len(localRecords)) log.Infof("Sent Standby status message. %s", numRowsProcessedMessage) standByLastLogged = time.Now() } - nextStandbyMessageDeadline = time.Now().Add(standbyMessageTimeout) + pkmRequiresResponse = false + } - if !p.commitLock && (len(localRecords) >= int(req.MaxBatchSize)) { - return nil + if (len(localRecords) >= int(req.MaxBatchSize)) && !p.commitLock { + return nil + } + + if waitingForCommit && !p.commitLock { + log.Infof( + "[%s] commit received, returning currently accumulated records - %d", + req.FlowJobName, + len(localRecords), + ) + return nil + } + + // if we are past the next standby deadline (?) 
+ if time.Now().After(nextStandbyMessageDeadline) { + if len(localRecords) > 0 { + log.Infof("[%s] standby deadline reached, have %d records, will return at next commit", + req.FlowJobName, + len(localRecords), + ) + + if !p.commitLock { + // immediate return if we are not waiting for a commit + return nil + } + + waitingForCommit = true + } else { + log.Infof("[%s] standby deadline reached, no records accumulated, continuing to wait", + req.FlowJobName, + ) } + nextStandbyMessageDeadline = time.Now().Add(standbyMessageTimeout) + } + + var ctx context.Context + var cancel context.CancelFunc + + if len(localRecords) == 0 { + ctx, cancel = context.WithCancel(p.ctx) + } else { + ctx, cancel = context.WithDeadline(p.ctx, nextStandbyMessageDeadline) } - ctx, cancel := context.WithDeadline(p.ctx, nextStandbyMessageDeadline) rawMsg, err := conn.ReceiveMessage(ctx) cancel() if err != nil && !p.commitLock { if pgconn.Timeout(err) { - log.Infof("Idle timeout reached, returning currently accumulated records - %d", len(localRecords)) + log.Infof("Stand-by deadline reached, returning currently accumulated records - %d", len(localRecords)) return nil } else { return fmt.Errorf("ReceiveMessage failed: %w", err) @@ -281,9 +323,10 @@ func (p *PostgresCDCSource) consumeStream( if pkm.ServerWALEnd > clientXLogPos { clientXLogPos = pkm.ServerWALEnd } - if pkm.ReplyRequested { - nextStandbyMessageDeadline = time.Time{} - } + + // always reply to keepalive messages + // instead of `pkm.ReplyRequested` + pkmRequiresResponse = true case pglogrepl.XLogDataByteID: xld, err := pglogrepl.ParseXLogData(msg.Data[1:]) diff --git a/flow/e2e/bigquery/peer_flow_bq_test.go b/flow/e2e/bigquery/peer_flow_bq_test.go index 1618e84326..ba8c2cb54c 100644 --- a/flow/e2e/bigquery/peer_flow_bq_test.go +++ b/flow/e2e/bigquery/peer_flow_bq_test.go @@ -111,8 +111,8 @@ func (s *PeerFlowE2ETestSuiteBQ) Test_Invalid_Connection_Config() { // TODO (kaushikiska): ensure flow name can only be alpha numeric and underscores. 
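	// ExitAfterRecords is a test-only limit: CDCFlowWorkflowWithConfig (see the
	// cdc_flow.go hunk later in this patch) breaks out of its sync loop once the
	// total records synced equals this value, and CreateCDCFlow passes -1 in
	// production so the check never fires.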
limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 1, - MaxBatchSize: 1, + ExitAfterRecords: 0, + MaxBatchSize: 1, } env.ExecuteWorkflow(peerflow.CDCFlowWorkflowWithConfig, nil, &limits, nil) @@ -156,8 +156,8 @@ func (s *PeerFlowE2ETestSuiteBQ) Test_Complete_Flow_No_Data() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 1, - MaxBatchSize: 1, + ExitAfterRecords: 0, + MaxBatchSize: 1, } env.ExecuteWorkflow(peerflow.CDCFlowWorkflowWithConfig, flowConnConfig, &limits, nil) @@ -201,8 +201,8 @@ func (s *PeerFlowE2ETestSuiteBQ) Test_Char_ColType_Error() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 1, - MaxBatchSize: 1, + ExitAfterRecords: 0, + MaxBatchSize: 1, } env.ExecuteWorkflow(peerflow.CDCFlowWorkflowWithConfig, flowConnConfig, &limits, nil) @@ -249,8 +249,8 @@ func (s *PeerFlowE2ETestSuiteBQ) Test_Complete_Simple_Flow_BQ() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - MaxBatchSize: 100, + ExitAfterRecords: 10, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -318,8 +318,8 @@ func (s *PeerFlowE2ETestSuiteBQ) Test_Toast_BQ() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - MaxBatchSize: 100, + ExitAfterRecords: 4, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -387,8 +387,8 @@ func (s *PeerFlowE2ETestSuiteBQ) Test_Toast_Nochanges_BQ() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - MaxBatchSize: 100, + ExitAfterRecords: 0, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -449,8 +449,8 @@ func (s *PeerFlowE2ETestSuiteBQ) Test_Toast_Advance_1_BQ() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 1, - MaxBatchSize: 100, + ExitAfterRecords: 11, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -523,8 +523,8 @@ func (s *PeerFlowE2ETestSuiteBQ) Test_Toast_Advance_2_BQ() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - MaxBatchSize: 100, + ExitAfterRecords: 6, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -592,8 +592,8 @@ func (s *PeerFlowE2ETestSuiteBQ) Test_Toast_Advance_3_BQ() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - MaxBatchSize: 100, + ExitAfterRecords: 4, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -661,8 +661,8 @@ func (s *PeerFlowE2ETestSuiteBQ) Test_Types_BQ() { limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - MaxBatchSize: 100, + ExitAfterRecords: 1, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -737,8 +737,8 @@ func (s *PeerFlowE2ETestSuiteBQ) Test_Multi_Table_BQ() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - MaxBatchSize: 100, + ExitAfterRecords: 2, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -799,8 +799,8 @@ func (s *PeerFlowE2ETestSuiteBQ) Test_Simple_Schema_Changes_BQ() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 10, - MaxBatchSize: 100, + ExitAfterRecords: 1, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -903,8 +903,8 @@ func (s *PeerFlowE2ETestSuiteBQ) Test_Composite_PKey_BQ() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - 
MaxBatchSize: 100, + ExitAfterRecords: 10, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -978,8 +978,8 @@ func (s *PeerFlowE2ETestSuiteBQ) Test_Composite_PKey_Toast_1_BQ() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - MaxBatchSize: 100, + ExitAfterRecords: 20, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -1056,8 +1056,8 @@ func (s *PeerFlowE2ETestSuiteBQ) Test_Composite_PKey_Toast_2_BQ() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - MaxBatchSize: 100, + ExitAfterRecords: 10, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup diff --git a/flow/e2e/postgres/peer_flow_pg_test.go b/flow/e2e/postgres/peer_flow_pg_test.go index 45666f0369..3f5718bb72 100644 --- a/flow/e2e/postgres/peer_flow_pg_test.go +++ b/flow/e2e/postgres/peer_flow_pg_test.go @@ -45,8 +45,8 @@ func (s *PeerFlowE2ETestSuitePG) Test_Simple_Flow_PG() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - MaxBatchSize: 100, + ExitAfterRecords: 10, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -107,8 +107,8 @@ func (s *PeerFlowE2ETestSuitePG) Test_Simple_Schema_Changes_PG() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 10, - MaxBatchSize: 100, + ExitAfterRecords: 1, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -271,8 +271,8 @@ func (s *PeerFlowE2ETestSuitePG) Test_Composite_PKey_PG() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 4, - MaxBatchSize: 100, + ExitAfterRecords: 10, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -350,8 +350,8 @@ func (s *PeerFlowE2ETestSuitePG) Test_Composite_PKey_Toast_1_PG() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - MaxBatchSize: 100, + ExitAfterRecords: 20, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -431,8 +431,8 @@ func (s *PeerFlowE2ETestSuitePG) Test_Composite_PKey_Toast_2_PG() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 4, - MaxBatchSize: 100, + ExitAfterRecords: 10, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup diff --git a/flow/e2e/s3/cdc_s3_test.go b/flow/e2e/s3/cdc_s3_test.go index aaca2a125a..85993e2fe8 100644 --- a/flow/e2e/s3/cdc_s3_test.go +++ b/flow/e2e/s3/cdc_s3_test.go @@ -24,7 +24,7 @@ func (s *PeerFlowE2ETestSuiteS3) Test_Complete_Simple_Flow_S3() { srcTableName := s.attachSchemaSuffix("test_simple_flow_s3") dstTableName := fmt.Sprintf("%s.%s", "peerdb_test_s3", "test_simple_flow_s3") - flowJobName := s.attachSuffix("test_simple_flow") + flowJobName := s.attachSuffix("test_simple_flow_s3") _, err := s.pool.Exec(context.Background(), fmt.Sprintf(` CREATE TABLE %s ( id SERIAL PRIMARY KEY, @@ -44,8 +44,9 @@ func (s *PeerFlowE2ETestSuiteS3) Test_Complete_Simple_Flow_S3() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 5, - MaxBatchSize: 5, + TotalSyncFlows: 4, + ExitAfterRecords: 20, + MaxBatchSize: 5, } go func() { @@ -95,7 +96,7 @@ func (s *PeerFlowE2ETestSuiteS3) Test_Complete_Simple_Flow_GCS_Interop() { srcTableName := s.attachSchemaSuffix("test_simple_flow_gcs_interop") dstTableName := fmt.Sprintf("%s.%s", "peerdb_test_gcs_interop", "test_simple_flow_gcs_interop") - flowJobName 
:= s.attachSuffix("test_simple_flow") + flowJobName := s.attachSuffix("test_simple_flow_gcs_interop") _, err := s.pool.Exec(context.Background(), fmt.Sprintf(` CREATE TABLE %s ( id SERIAL PRIMARY KEY, @@ -115,8 +116,9 @@ func (s *PeerFlowE2ETestSuiteS3) Test_Complete_Simple_Flow_GCS_Interop() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 5, - MaxBatchSize: 5, + TotalSyncFlows: 4, + ExitAfterRecords: 20, + MaxBatchSize: 5, } go func() { @@ -131,6 +133,7 @@ func (s *PeerFlowE2ETestSuiteS3) Test_Complete_Simple_Flow_GCS_Interop() { `, srcTableName), testKey, testValue) s.NoError(err) } + fmt.Println("Inserted 20 rows into the source table") s.NoError(err) }() diff --git a/flow/e2e/snowflake/peer_flow_sf_test.go b/flow/e2e/snowflake/peer_flow_sf_test.go index e489545e3f..e5b7c588f9 100644 --- a/flow/e2e/snowflake/peer_flow_sf_test.go +++ b/flow/e2e/snowflake/peer_flow_sf_test.go @@ -149,15 +149,15 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Complete_Simple_Flow_SF() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - MaxBatchSize: 100, + ExitAfterRecords: 20, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup - // and then insert 15 rows into the source table + // and then insert 20 rows into the source table go func() { e2e.SetupCDCFlowStatusQuery(env, connectionGen) - // insert 15 rows into the source table + // insert 20 rows into the source table for i := 0; i < 20; i++ { testKey := fmt.Sprintf("test_key_%d", i) testValue := fmt.Sprintf("test_value_%d", i) @@ -166,7 +166,7 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Complete_Simple_Flow_SF() { `, srcTableName), testKey, testValue) s.NoError(err) } - fmt.Println("Inserted 10 rows into the source table") + fmt.Println("Inserted 20 rows into the source table") }() env.ExecuteWorkflow(peerflow.CDCFlowWorkflowWithConfig, flowConnConfig, &limits, nil) @@ -225,15 +225,15 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Invalid_Geo_SF_Avro_CDC() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - MaxBatchSize: 100, + ExitAfterRecords: 10, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup // and then insert 10 rows into the source table go func() { e2e.SetupCDCFlowStatusQuery(env, connectionGen) - // insert 10 rows into the source table + // insert 4 invalid shapes and 6 valid shapes into the source table for i := 0; i < 4; i++ { _, err = s.pool.Exec(context.Background(), fmt.Sprintf(` INSERT INTO %s (line,poly) VALUES ($1,$2) @@ -269,7 +269,7 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Invalid_Geo_SF_Avro_CDC() { s.Contains(err.Error(), "continue as new") // We inserted 4 invalid shapes in each. 
- // They should have filtered out as null on destination + // They should have been filtered out as null on destination lineCount, err := s.sfHelper.CountNonNullRows("test_invalid_geo_sf_avro_cdc", "line") s.NoError(err) s.Equal(6, lineCount) @@ -312,8 +312,8 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Toast_SF() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - MaxBatchSize: 100, + ExitAfterRecords: 4, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -382,12 +382,10 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Toast_Nochanges_SF() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - MaxBatchSize: 100, + ExitAfterRecords: 0, + MaxBatchSize: 100, } - // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup - // and execute a transaction touching toast columns go func() { e2e.SetupCDCFlowStatusQuery(env, connectionGen) /* transaction updating no rows */ @@ -398,7 +396,6 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Toast_Nochanges_SF() { END; `, srcTableName, srcTableName)) s.NoError(err) - fmt.Println("Executed a transaction touching toast columns") }() env.ExecuteWorkflow(peerflow.CDCFlowWorkflowWithConfig, flowConnConfig, &limits, nil) @@ -444,8 +441,8 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Toast_Advance_1_SF() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - MaxBatchSize: 100, + ExitAfterRecords: 11, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -518,8 +515,8 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Toast_Advance_2_SF() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - MaxBatchSize: 100, + ExitAfterRecords: 6, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -587,8 +584,8 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Toast_Advance_3_SF() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - MaxBatchSize: 100, + ExitAfterRecords: 4, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -656,8 +653,8 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Types_SF() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - MaxBatchSize: 100, + ExitAfterRecords: 1, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -731,8 +728,8 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Multi_Table_SF() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - MaxBatchSize: 100, + ExitAfterRecords: 2, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -790,8 +787,8 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Simple_Schema_Changes_SF() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 10, - MaxBatchSize: 100, + ExitAfterRecords: 1, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -954,8 +951,8 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Composite_PKey_SF() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 5, - MaxBatchSize: 100, + ExitAfterRecords: 10, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -1029,8 +1026,8 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Composite_PKey_Toast_1_SF() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - MaxBatchSize: 100, + ExitAfterRecords: 20, + MaxBatchSize: 100, } // in a separate goroutine, wait for 
PeerFlowStatusQuery to finish setup @@ -1106,8 +1103,8 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Composite_PKey_Toast_2_SF() { s.NoError(err) limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 4, - MaxBatchSize: 100, + ExitAfterRecords: 10, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup @@ -1187,8 +1184,8 @@ func (s *PeerFlowE2ETestSuiteSF) Test_Column_Exclusion() { } limits := peerflow.CDCFlowLimits{ - TotalSyncFlows: 2, - MaxBatchSize: 100, + ExitAfterRecords: 10, + MaxBatchSize: 100, } // in a separate goroutine, wait for PeerFlowStatusQuery to finish setup diff --git a/flow/workflows/cdc_flow.go b/flow/workflows/cdc_flow.go index 2dfe99ff25..eb9b42cc03 100644 --- a/flow/workflows/cdc_flow.go +++ b/flow/workflows/cdc_flow.go @@ -33,6 +33,8 @@ type CDCFlowLimits struct { TotalNormalizeFlows int // Maximum number of rows in a sync flow batch. MaxBatchSize int + // Rows synced after which we can say a test is done. + ExitAfterRecords int } type CDCFlowWorkflowState struct { @@ -289,6 +291,7 @@ func CDCFlowWorkflowWithConfig( } currentSyncFlowNum := 0 + totalRecordsSynced := 0 for { // check and act on signals before a fresh flow starts. @@ -324,6 +327,12 @@ func CDCFlowWorkflowWithConfig( } currentSyncFlowNum++ + // check if total records synced have been completed + if totalRecordsSynced == limits.ExitAfterRecords { + w.logger.Warn("All the records have been synced successfully, so ending the flow") + break + } + syncFlowID, err := GetChildWorkflowID(ctx, "sync-flow", cfg.FlowJobName) if err != nil { return state, err @@ -358,9 +367,12 @@ func CDCFlowWorkflowWithConfig( state.SyncFlowStatuses = append(state.SyncFlowStatuses, childSyncFlowRes) if childSyncFlowRes != nil { state.RelationMessageMapping = childSyncFlowRes.RelationMessageMapping + totalRecordsSynced += int(childSyncFlowRes.NumRecordsSynced) } } + w.logger.Info("Total records synced: ", totalRecordsSynced) + normalizeFlowID, err := GetChildWorkflowID(ctx, "normalize-flow", cfg.FlowJobName) if err != nil { return state, err diff --git a/flow/workflows/sync_flow.go b/flow/workflows/sync_flow.go index d59de7cf69..0b207bc65f 100644 --- a/flow/workflows/sync_flow.go +++ b/flow/workflows/sync_flow.go @@ -83,7 +83,7 @@ func (s *SyncFlowExecution) executeSyncFlow( startFlowCtx := workflow.WithActivityOptions(ctx, workflow.ActivityOptions{ StartToCloseTimeout: 72 * time.Hour, - HeartbeatTimeout: 5 * time.Minute, + HeartbeatTimeout: 30 * time.Second, }) // execute StartFlow on the peers to start the flow From f2cf96afb88ffadd0995dfa3669f18e0fa47e1db Mon Sep 17 00:00:00 2001 From: Kaushik Iska Date: Wed, 29 Nov 2023 16:38:54 -0800 Subject: [PATCH 08/12] add better logging for cdc flow (#740) --- flow/cmd/handler.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/flow/cmd/handler.go b/flow/cmd/handler.go index 44fb5f01a6..99c25cbd28 100644 --- a/flow/cmd/handler.go +++ b/flow/cmd/handler.go @@ -164,6 +164,7 @@ func (h *FlowRequestHandler) CreateCDCFlow( if req.CreateCatalogEntry { err := h.createCdcJobEntry(ctx, req, workflowID) if err != nil { + log.Errorf("unable to create flow job entry: %v", err) return nil, fmt.Errorf("unable to create flow job entry: %w", err) } } @@ -171,6 +172,7 @@ func (h *FlowRequestHandler) CreateCDCFlow( var err error err = h.updateFlowConfigInCatalog(cfg) if err != nil { + log.Errorf("unable to update flow config in catalog: %v", err) return nil, fmt.Errorf("unable to update flow config in catalog: %w", err) } @@ -184,6 +186,7 @@ func (h 
*FlowRequestHandler) CreateCDCFlow( state, // workflow state ) if err != nil { + log.Errorf("unable to start PeerFlow workflow: %v", err) return nil, fmt.Errorf("unable to start PeerFlow workflow: %w", err) } From ab3ec4c1793c3b4cf168ca3d0de3892afd6b8cb4 Mon Sep 17 00:00:00 2001 From: Kevin Biju <52661649+heavycrystal@users.noreply.github.com> Date: Thu, 30 Nov 2023 13:59:18 +0530 Subject: [PATCH 09/12] filtering system schemas from all tables and all schemas queries (#741) --- flow/cmd/peer_data.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/flow/cmd/peer_data.go b/flow/cmd/peer_data.go index 14c8ba43cc..8aecff6229 100644 --- a/flow/cmd/peer_data.go +++ b/flow/cmd/peer_data.go @@ -44,7 +44,7 @@ func (h *FlowRequestHandler) GetSchemas( defer peerPool.Close() rows, err := peerPool.Query(ctx, "SELECT schema_name"+ - " FROM information_schema.schemata;") + " FROM information_schema.schemata WHERE schema_name !~ '^pg_' AND schema_name <> 'information_schema';") if err != nil { return &protos.PeerSchemasResponse{Schemas: nil}, err } @@ -106,7 +106,7 @@ func (h *FlowRequestHandler) GetAllTables( defer peerPool.Close() rows, err := peerPool.Query(ctx, "SELECT table_schema || '.' || table_name AS schema_table "+ - "FROM information_schema.tables;") + "FROM information_schema.tables WHERE table_schema !~ '^pg_' AND table_schema <> 'information_schema'") if err != nil { return &protos.AllTablesResponse{Tables: nil}, err } From 505e5249c20a701145646314946b1a46075b3ff0 Mon Sep 17 00:00:00 2001 From: Kevin Biju <52661649+heavycrystal@users.noreply.github.com> Date: Thu, 30 Nov 2023 18:05:06 +0530 Subject: [PATCH 10/12] added Dropdown to filter peers by type (#742) --- ui/app/peers/peersTable.tsx | 57 +++++++++++++++++++++++------ ui/components/PeerTypeComponent.tsx | 4 +- ui/tsconfig.json | 2 +- 3 files changed, 48 insertions(+), 15 deletions(-) diff --git a/ui/app/peers/peersTable.tsx b/ui/app/peers/peersTable.tsx index 1511443767..cf64635cd0 100644 --- a/ui/app/peers/peersTable.tsx +++ b/ui/app/peers/peersTable.tsx @@ -1,12 +1,15 @@ 'use client'; import { DropDialog } from '@/components/DropDialog'; import PeerButton from '@/components/PeerComponent'; -import PeerTypeLabel from '@/components/PeerTypeComponent'; -import { Peer } from '@/grpc_generated/peers'; +import PeerTypeLabel, { + DBTypeToGoodText, +} from '@/components/PeerTypeComponent'; +import { DBType, Peer } from '@/grpc_generated/peers'; import { Label } from '@/lib/Label'; import { SearchField } from '@/lib/SearchField'; import { Table, TableCell, TableRow } from '@/lib/Table'; import { useMemo, useState } from 'react'; +import ReactSelect from 'react-select'; function PeerRow({ peer }: { peer: Peer }) { return ( @@ -31,13 +34,33 @@ function PeerRow({ peer }: { peer: Peer }) { function PeersTable({ title, peers }: { title: string; peers: Peer[] }) { const [searchQuery, setSearchQuery] = useState(''); + const [filteredType, setFilteredType] = useState( + undefined + ); const rows = useMemo( () => - peers.filter((peer) => { - return peer.name.toLowerCase().includes(searchQuery.toLowerCase()); - }), - [peers, searchQuery] + peers + .filter((peer) => { + return peer.name.toLowerCase().includes(searchQuery.toLowerCase()); + }) + .filter((peer) => { + return filteredType == undefined || peer.type == filteredType; + }), + [peers, searchQuery, filteredType] ); + const allTypesOption: { value: DBType | undefined; label: string } = { + value: undefined, + label: 'All', + }; + const availableTypes: { value: DBType | 
undefined; label: string }[] = + Array.from( + new Map( // Map filters out duplicates + peers.flatMap((peer) => [ + [peer.type, { value: peer.type, label: DBTypeToGoodText(peer.type) }], + ]) + ).values() + ); + availableTypes.unshift(allTypesOption); return ( , right: ( - ) => - setSearchQuery(e.target.value) - } - /> + <> + { + setFilteredType(val?.value); + }} + defaultValue={allTypesOption} + /> + ) => + setSearchQuery(e.target.value) + } + /> + ), }} header={ diff --git a/ui/components/PeerTypeComponent.tsx b/ui/components/PeerTypeComponent.tsx index 996d1a48af..089c41b92d 100644 --- a/ui/components/PeerTypeComponent.tsx +++ b/ui/components/PeerTypeComponent.tsx @@ -4,7 +4,7 @@ import { Label } from '@/lib/Label'; import Image from 'next/image'; import { DBTypeToImageMapping } from './PeerComponent'; -const DBTypeToGoodText = (ptype: DBType) => { +export const DBTypeToGoodText = (ptype: DBType) => { switch (ptype) { case DBType.POSTGRES: return 'PostgreSQL'; @@ -13,7 +13,7 @@ const DBTypeToGoodText = (ptype: DBType) => { case DBType.EVENTHUB: return 'Event Hubs'; case DBType.EVENTHUB_GROUP: - return 'Event Hubs'; + return 'Event Hubs (Group)'; case DBType.BIGQUERY: return 'BigQuery'; case DBType.S3: diff --git a/ui/tsconfig.json b/ui/tsconfig.json index 23ba4fd549..c443fefcce 100644 --- a/ui/tsconfig.json +++ b/ui/tsconfig.json @@ -1,6 +1,6 @@ { "compilerOptions": { - "target": "es5", + "target": "es6", "lib": ["dom", "dom.iterable", "esnext"], "allowJs": true, "skipLibCheck": true, From 864e5025637fd530bd262c6f359bf89731eb011c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Philip=20Dub=C3=A9?= Date: Fri, 1 Dec 2023 07:31:53 +0000 Subject: [PATCH 11/12] chore: update dependencies (#720) Still holding aws-sdk-go & gosnowflake back --- flow/go.mod | 4 +- flow/go.sum | 22 +++--- nexus/Cargo.lock | 175 +++++++++++++++---------------------------- ui/package-lock.json | 138 +++++++++++++++++----------------- ui/package.json | 6 +- 5 files changed, 143 insertions(+), 202 deletions(-) diff --git a/flow/go.mod b/flow/go.mod index 40d5ee7fe0..f3343af084 100644 --- a/flow/go.mod +++ b/flow/go.mod @@ -8,7 +8,7 @@ require ( cloud.google.com/go/storage v1.35.1 github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.4.0 github.com/Azure/azure-sdk-for-go/sdk/messaging/azeventhubs v1.0.2 - github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/eventhub/armeventhub v1.1.1 + github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/eventhub/armeventhub v1.2.0 github.com/aws/aws-sdk-go v1.47.9 github.com/cenkalti/backoff/v4 v4.2.1 github.com/google/uuid v1.4.0 @@ -30,7 +30,7 @@ require ( github.com/twpayne/go-geos v0.14.0 github.com/urfave/cli/v2 v2.25.7 github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a - go.temporal.io/api v1.25.0 + go.temporal.io/api v1.26.0 go.temporal.io/sdk v1.25.1 go.uber.org/atomic v1.11.0 go.uber.org/automaxprocs v1.5.3 diff --git a/flow/go.sum b/flow/go.sum index 77d0b13010..ed4b41ab72 100644 --- a/flow/go.sum +++ b/flow/go.sum @@ -28,12 +28,12 @@ github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.0 h1:d81/ng9rET2YqdVkVwkb6EX github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.0/go.mod h1:s4kgfzA0covAXNicZHDMN58jExvcng2mC/DepXiF1EI= github.com/Azure/azure-sdk-for-go/sdk/messaging/azeventhubs v1.0.2 h1:ujuMdFIUqhfohvpjjt7YmWn6Wk5Vlw9cwtGC0/BEwLU= github.com/Azure/azure-sdk-for-go/sdk/messaging/azeventhubs v1.0.2/go.mod h1:P39PnDHXbDhUV+BVw/8Nb7wQnM76jKUA7qx5T7eS+BU= -github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/eventhub/armeventhub v1.1.1 
h1:gZ1ZZvrVUhDNsGNpbo2N87Y0CJB8p3IS5UH9Z4Ui97g= -github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/eventhub/armeventhub v1.1.1/go.mod h1:7fQVOnRA11ScLE8dOCWanXHQa2NMFOM2i0u/1VRICXA= -github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/internal v1.1.2 h1:mLY+pNLjCUeKhgnAJWAKhEUQM+RJQo2H1fuGSw1Ky1E= -github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/internal v1.1.2/go.mod h1:FbdwsQ2EzwvXxOPcMFYO8ogEc9uMMIj3YkmCdXdAFmk= -github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/resources/armresources v1.0.0 h1:ECsQtyERDVz3NP3kvDOTLvbQhqWp/x9EsGKtb4ogUr8= -github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/resources/armresources v1.0.0/go.mod h1:s1tW/At+xHqjNFvWU4G0c0Qv33KOhvbGNj0RCTQDV8s= +github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/eventhub/armeventhub v1.2.0 h1:+dggnR89/BIIlRlQ6d19dkhhdd/mQUiQbXhyHUFiB4w= +github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/eventhub/armeventhub v1.2.0/go.mod h1:tI9M2Q/ueFi287QRkdrhb9LHm6ZnXgkVYLRC3FhYkPw= +github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/internal/v2 v2.0.0 h1:PTFGRSlMKCQelWwxUyYVEUqseBJVemLyqWJjvMyt0do= +github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/internal/v2 v2.0.0/go.mod h1:LRr2FzBTQlONPPa5HREE5+RjSCTXl7BwOvYOaWTqCaI= +github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/resources/armresources v1.1.1 h1:7CBQ+Ei8SP2c6ydQTGCCrS35bDxgTMfoP2miAwK++OU= +github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/resources/armresources v1.1.1/go.mod h1:c/wcGeGx5FUPbM/JltUYHZcKmigwyVLJlDq+4HdtXaw= github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.2.0 h1:Ma67P/GGprNwsslzEH6+Kb8nybI8jpDTm4Wmzu2ReK8= github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.2.0/go.mod h1:c+Lifp3EDEamAkPVzMooRNOK6CZjNSdEnf1A7jsI9u4= github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.0.0 h1:yfJe15aSwEQ6Oo6J+gdfdulPNoZ3TEhmbhLIoxZcA+U= @@ -248,8 +248,6 @@ github.com/klauspost/asmfmt v1.3.2 h1:4Ri7ox3EwapiOjCki+hw14RyKk201CN4rzyCJRFLpK github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE= github.com/klauspost/compress v1.17.3 h1:qkRjuerhUU1EmXLYGkSH6EZL+vPSxIrYjLNAK4slzwA= github.com/klauspost/compress v1.17.3/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= -github.com/klauspost/compress v1.17.3 h1:qkRjuerhUU1EmXLYGkSH6EZL+vPSxIrYjLNAK4slzwA= -github.com/klauspost/compress v1.17.3/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc= github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= @@ -299,8 +297,8 @@ github.com/robfig/cron v1.2.0 h1:ZjScXvvxeQ63Dbyxy76Fj3AT3Ut0aKsyd2/tl3DTMuQ= github.com/robfig/cron v1.2.0/go.mod h1:JGuDeoQd7Z6yL4zQhZ3OPEVHB7fL6Ka6skscFHfmt2k= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= -github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= +github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= 
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= @@ -344,8 +342,8 @@ github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0= github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= -go.temporal.io/api v1.25.0 h1:V6lIYuQlfmM1dc2vn6mIG5F2cY3EQ+xEjfTZ801Vpx8= -go.temporal.io/api v1.25.0/go.mod h1:LTJM9iMOIuiE5hRtym4Ne6I4rKlDGioUiscdD9D6N2Y= +go.temporal.io/api v1.26.0 h1:N4V0Daqa0qqK5+9LELSZV7clBYrwB4l33iaFfKgycPk= +go.temporal.io/api v1.26.0/go.mod h1:uVAcpQJ6bM4mxZ3m7vSHU65fHjrwy9ktGQMtsNfMZQQ= go.temporal.io/sdk v1.25.1 h1:jC9l9vHHz5OJ7PR6OjrpYSN4+uEG0bLe5rdF9nlMSGk= go.temporal.io/sdk v1.25.1/go.mod h1:X7iFKZpsj90BfszfpFCzLX8lwEJXbnRrl351/HyEgmU= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= diff --git a/nexus/Cargo.lock b/nexus/Cargo.lock index 21d09f0d08..e82f078039 100644 --- a/nexus/Cargo.lock +++ b/nexus/Cargo.lock @@ -112,7 +112,7 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" dependencies = [ - "windows-sys 0.48.0", + "windows-sys", ] [[package]] @@ -122,7 +122,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0699d10d2f4d628a98ee7b57b289abbc98ff3bad977cb3152709d4bf2330628" dependencies = [ "anstyle", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] @@ -450,9 +450,9 @@ dependencies = [ [[package]] name = "cargo_toml" -version = "0.17.0" +version = "0.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ca592ad99e6a0fd4b95153406138b997cc26ccd3cd0aecdfd4fbdbf1519bd77" +checksum = "4d1ece59890e746567b467253aea0adbe8a21784d0b025d8a306f66c391c2957" dependencies = [ "serde", "toml 0.8.8", @@ -585,7 +585,7 @@ checksum = "2674ec482fbc38012cf31e6c42ba0177b431a0cb6f15fe40efa5aab1bda516f6" dependencies = [ "is-terminal", "lazy_static", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] @@ -725,7 +725,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ "cfg-if", - "hashbrown 0.14.2", + "hashbrown 0.14.3", "lock_api", "once_cell", "parking_lot_core", @@ -873,7 +873,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f258a7194e7f7c2a7837a8913aeab7fd8c383457034fa20ce4dd3dcb813e8eb8" dependencies = [ "libc", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] @@ -897,7 +897,7 @@ dependencies = [ "cfg-if", "libc", "redox_syscall 0.3.5", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] @@ -962,9 +962,9 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ "percent-encoding", ] @@ -1131,9 +1131,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.28.0" +version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" [[package]] name = "glob" @@ -1171,9 +1171,9 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.14.2" +version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f93e7192158dbcda357bdec5fb5788eebf8bbac027f3f33e719d29135ae84156" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" [[package]] name = "hdrhistogram" @@ -1221,7 +1221,7 @@ version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb" dependencies = [ - "windows-sys 0.48.0", + "windows-sys", ] [[package]] @@ -1354,9 +1354,9 @@ dependencies = [ [[package]] name = "idna" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" dependencies = [ "unicode-bidi", "unicode-normalization", @@ -1379,7 +1379,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" dependencies = [ "equivalent", - "hashbrown 0.14.2", + "hashbrown 0.14.3", ] [[package]] @@ -1406,7 +1406,7 @@ checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" dependencies = [ "hermit-abi", "rustix", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] @@ -1600,7 +1600,7 @@ checksum = "3dce281c5e46beae905d4de1870d8b1509a9142b62eedf18b443b011ca8343d0" dependencies = [ "libc", "wasi", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] @@ -1742,9 +1742,9 @@ checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" [[package]] name = "openssl" -version = "0.10.59" +version = "0.10.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a257ad03cd8fb16ad4172fedf8094451e1af1c4b70097636ef2eac9a5f0cc33" +checksum = "79a4c6c3a2b158f7f8f2a2fc5a969fa3a068df6fc9dbb4a43845436e3af7c800" dependencies = [ "bitflags 2.4.1", "cfg-if", @@ -1783,9 +1783,9 @@ dependencies = [ [[package]] name = "openssl-sys" -version = "0.9.95" +version = "0.9.96" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40a4130519a360279579c2053038317e40eff64d13fd3f004f9e1b72b8a6aaf9" +checksum = "3812c071ba60da8b5677cc12bcb1d42989a65553772897a7e0355545a819838f" dependencies = [ "cc", "libc", @@ -2072,9 +2072,9 @@ dependencies = [ [[package]] name = "percent-encoding" -version = "2.3.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "petgraph" @@ -2367,18 +2367,18 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.69" +version = "1.0.70" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da" +checksum = "39278fbbf5fb4f646ce651690877f89d1c5811a3d4acb27700c1cb3cdb78fd3b" dependencies = [ "unicode-ident", ] [[package]] name = "prost" -version = "0.12.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5a5a410fc7882af66deb8d01d01737353cf3ad6204c408177ba494291a626312" +checksum = "146c289cda302b98a28d40c8b3b90498d6e526dd24ac2ecea73e4e491685b94a" dependencies = [ "bytes", "prost-derive", @@ -2386,9 +2386,9 @@ dependencies = [ [[package]] name = "prost-build" -version = "0.12.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fa3d084c8704911bfefb2771be2f9b6c5c0da7343a71e0021ee3c665cada738" +checksum = "c55e02e35260070b6f716a2423c2ff1c3bb1642ddca6f99e1f26d06268a0e2d2" dependencies = [ "bytes", "heck", @@ -2408,9 +2408,9 @@ dependencies = [ [[package]] name = "prost-derive" -version = "0.12.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "065717a5dfaca4a83d2fe57db3487b311365200000551d7a364e715dbf4346bc" +checksum = "efb6c9a1dd1def8e2124d17e83a20af56f1570d6c2d2bd9e266ccb768df3840e" dependencies = [ "anyhow", "itertools 0.11.0", @@ -2421,9 +2421,9 @@ dependencies = [ [[package]] name = "prost-types" -version = "0.12.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8339f32236f590281e2f6368276441394fcd1b2133b549cc895d0ae80f2f9a52" +checksum = "193898f59edcf43c26227dcd4c8427f00d99d61e95dcde58dabd49fa291d470e" dependencies = [ "prost", ] @@ -2722,7 +2722,7 @@ dependencies = [ "libc", "spin 0.9.8", "untrusted 0.9.0", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] @@ -2807,7 +2807,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] @@ -2889,7 +2889,7 @@ version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88" dependencies = [ - "windows-sys 0.48.0", + "windows-sys", ] [[package]] @@ -3109,14 +3109,14 @@ dependencies = [ [[package]] name = "simple_logger" -version = "4.2.0" +version = "4.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2230cd5c29b815c9b699fb610b49a5ed65588f3509d9f0108be3a885da629333" +checksum = "da0ca6504625ee1aa5fda33913d2005eab98c7a42dd85f116ecce3ff54c9d3ef" dependencies = [ "colored", "log", "time", - "windows-sys 0.42.0", + "windows-sys", ] [[package]] @@ -3186,7 +3186,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" dependencies = [ "libc", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] @@ -3345,7 +3345,7 @@ dependencies = [ "fastrand", "redox_syscall 0.4.1", "rustix", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] @@ -3450,7 +3450,7 @@ dependencies = [ "socket2 0.5.5", "tokio-macros", "tracing", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] @@ -3850,9 +3850,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "ureq" -version = "2.9.0" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7830e33f6e25723d41a63f77e434159dad02919f18f55a512b5f16f3b1d77138" +checksum = "f8cdd25c339e200129fe4de81451814e5228c9b771d57378817d6117cc2b3f97" dependencies = [ "base64 0.21.5", "encoding_rs", @@ -3869,9 +3869,9 @@ dependencies = [ [[package]] name = "url" -version = "2.4.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5" +checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" dependencies = 
[ "form_urlencoded", "idna", @@ -4039,9 +4039,9 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.25.2" +version = "0.25.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14247bb57be4f377dfb94c72830b8ce8fc6beac03cf4bf7b9732eadd414123fc" +checksum = "1778a42e8b3b90bff8d0f5032bf22250792889a5cdc752aa0020c84abe3aaf10" [[package]] name = "which" @@ -4105,21 +4105,6 @@ dependencies = [ "windows-targets", ] -[[package]] -name = "windows-sys" -version = "0.42.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" -dependencies = [ - "windows_aarch64_gnullvm 0.42.2", - "windows_aarch64_msvc 0.42.2", - "windows_i686_gnu 0.42.2", - "windows_i686_msvc 0.42.2", - "windows_x86_64_gnu 0.42.2", - "windows_x86_64_gnullvm 0.42.2", - "windows_x86_64_msvc 0.42.2", -] - [[package]] name = "windows-sys" version = "0.48.0" @@ -4135,93 +4120,51 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ - "windows_aarch64_gnullvm 0.48.5", - "windows_aarch64_msvc 0.48.5", - "windows_i686_gnu 0.48.5", - "windows_i686_msvc 0.48.5", - "windows_x86_64_gnu 0.48.5", - "windows_x86_64_gnullvm 0.48.5", - "windows_x86_64_msvc 0.48.5", + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", ] -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" - [[package]] name = "windows_aarch64_gnullvm" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" -[[package]] -name = "windows_aarch64_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" - [[package]] name = "windows_aarch64_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" -[[package]] -name = "windows_i686_gnu" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" - [[package]] name = "windows_i686_gnu" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" -[[package]] -name = "windows_i686_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" - [[package]] name = "windows_i686_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" -[[package]] -name = "windows_x86_64_gnu" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" - [[package]] name = "windows_x86_64_gnu" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" - [[package]] name = "windows_x86_64_gnullvm" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" -[[package]] -name = "windows_x86_64_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" - [[package]] name = "windows_x86_64_msvc" version = "0.48.5" @@ -4244,7 +4187,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" dependencies = [ "cfg-if", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] diff --git a/ui/package-lock.json b/ui/package-lock.json index 9436126925..7a9e9d70cb 100644 --- a/ui/package-lock.json +++ b/ui/package-lock.json @@ -26,13 +26,13 @@ "@radix-ui/react-toggle-group": "^1.0.4", "@radix-ui/react-tooltip": "^1.0.7", "@tremor/react": "^3.11.1", - "@types/node": "^20.9.3", + "@types/node": "^20.10.0", "@types/react": "^18.2.38", - "@types/react-dom": "^18.2.16", + "@types/react-dom": "^18.2.17", "classnames": "^2.3.2", "clsx": "^2.0.0", "long": "^5.2.3", - "lucide-react": "^0.292.0", + "lucide-react": "^0.293.0", "material-symbols": "^0.14.1", "moment": "^2.29.4", "moment-timezone": "^0.5.43", @@ -4893,9 +4893,9 @@ } }, "node_modules/@rushstack/eslint-patch": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/@rushstack/eslint-patch/-/eslint-patch-1.5.1.tgz", - "integrity": "sha512-6i/8UoL0P5y4leBIGzvkZdS85RDMG9y1ihZzmTZQ5LdHUYmZ7pKFoj8X0236s3lusPs1Fa5HTQUpwI+UfTcmeA==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@rushstack/eslint-patch/-/eslint-patch-1.6.0.tgz", + "integrity": "sha512-2/U3GXA6YiPYQDLGwtGlnNgKYBSwCFIHf8Y9LUY5VATHdtbLlU0Y1R3QoBnT0aB4qv/BEiVVsj7LJXoQCgJ2vA==", "dev": true }, "node_modules/@sinclair/typebox": { @@ -5506,9 +5506,9 @@ } }, "node_modules/@storybook/builder-webpack5/node_modules/@types/node": { - "version": "18.18.11", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.18.11.tgz", - "integrity": "sha512-c1vku6qnTeujJneYH94/4aq73XrVcsJe35UPyAsSok1ijiKrkRzK+AxQPSpNMUnC03roWBBwJx/9I8V7lQoxmA==", + "version": "18.18.13", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.18.13.tgz", + "integrity": "sha512-vXYZGRrSCreZmq1rEjMRLXJhiy8MrIeVasx+PCVlP414N7CJLHnMf+juVvjdprHyH+XRy3zKZLHeNueOpJCn0g==", "dev": true, "dependencies": { "undici-types": "~5.26.4" @@ -5849,9 +5849,9 @@ } }, "node_modules/@storybook/core-common/node_modules/@types/node": { - "version": "18.18.11", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.18.11.tgz", - "integrity": "sha512-c1vku6qnTeujJneYH94/4aq73XrVcsJe35UPyAsSok1ijiKrkRzK+AxQPSpNMUnC03roWBBwJx/9I8V7lQoxmA==", + "version": "18.18.13", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.18.13.tgz", + "integrity": "sha512-vXYZGRrSCreZmq1rEjMRLXJhiy8MrIeVasx+PCVlP414N7CJLHnMf+juVvjdprHyH+XRy3zKZLHeNueOpJCn0g==", "dev": true, "dependencies": { "undici-types": "~5.26.4" @@ -5924,9 +5924,9 @@ } }, "node_modules/@storybook/core-server/node_modules/@types/node": { - "version": "18.18.11", - "resolved": 
"https://registry.npmjs.org/@types/node/-/node-18.18.11.tgz", - "integrity": "sha512-c1vku6qnTeujJneYH94/4aq73XrVcsJe35UPyAsSok1ijiKrkRzK+AxQPSpNMUnC03roWBBwJx/9I8V7lQoxmA==", + "version": "18.18.13", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.18.13.tgz", + "integrity": "sha512-vXYZGRrSCreZmq1rEjMRLXJhiy8MrIeVasx+PCVlP414N7CJLHnMf+juVvjdprHyH+XRy3zKZLHeNueOpJCn0g==", "dev": true, "dependencies": { "undici-types": "~5.26.4" @@ -6003,18 +6003,18 @@ } }, "node_modules/@storybook/core-webpack/node_modules/@types/node": { - "version": "18.18.11", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.18.11.tgz", - "integrity": "sha512-c1vku6qnTeujJneYH94/4aq73XrVcsJe35UPyAsSok1ijiKrkRzK+AxQPSpNMUnC03roWBBwJx/9I8V7lQoxmA==", + "version": "18.18.13", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.18.13.tgz", + "integrity": "sha512-vXYZGRrSCreZmq1rEjMRLXJhiy8MrIeVasx+PCVlP414N7CJLHnMf+juVvjdprHyH+XRy3zKZLHeNueOpJCn0g==", "dev": true, "dependencies": { "undici-types": "~5.26.4" } }, "node_modules/@storybook/csf": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@storybook/csf/-/csf-0.1.1.tgz", - "integrity": "sha512-4hE3AlNVxR60Wc5KSC68ASYzUobjPqtSKyhV6G+ge0FIXU55N5nTY7dXGRZHQGDBPq+XqchMkIdlkHPRs8nTHg==", + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/@storybook/csf/-/csf-0.1.2.tgz", + "integrity": "sha512-ePrvE/pS1vsKR9Xr+o+YwdqNgHUyXvg+1Xjx0h9LrVx7Zq4zNe06pd63F5EvzTbCbJsHj7GHr9tkiaqm7U8WRA==", "dev": true, "dependencies": { "type-fest": "^2.19.0" @@ -6258,9 +6258,9 @@ } }, "node_modules/@storybook/nextjs/node_modules/@types/node": { - "version": "18.18.11", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.18.11.tgz", - "integrity": "sha512-c1vku6qnTeujJneYH94/4aq73XrVcsJe35UPyAsSok1ijiKrkRzK+AxQPSpNMUnC03roWBBwJx/9I8V7lQoxmA==", + "version": "18.18.13", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.18.13.tgz", + "integrity": "sha512-vXYZGRrSCreZmq1rEjMRLXJhiy8MrIeVasx+PCVlP414N7CJLHnMf+juVvjdprHyH+XRy3zKZLHeNueOpJCn0g==", "dev": true, "dependencies": { "undici-types": "~5.26.4" @@ -6402,9 +6402,9 @@ } }, "node_modules/@storybook/preset-react-webpack/node_modules/@types/node": { - "version": "18.18.11", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.18.11.tgz", - "integrity": "sha512-c1vku6qnTeujJneYH94/4aq73XrVcsJe35UPyAsSok1ijiKrkRzK+AxQPSpNMUnC03roWBBwJx/9I8V7lQoxmA==", + "version": "18.18.13", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.18.13.tgz", + "integrity": "sha512-vXYZGRrSCreZmq1rEjMRLXJhiy8MrIeVasx+PCVlP414N7CJLHnMf+juVvjdprHyH+XRy3zKZLHeNueOpJCn0g==", "dev": true, "dependencies": { "undici-types": "~5.26.4" @@ -6559,9 +6559,9 @@ } }, "node_modules/@storybook/react/node_modules/@types/node": { - "version": "18.18.11", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.18.11.tgz", - "integrity": "sha512-c1vku6qnTeujJneYH94/4aq73XrVcsJe35UPyAsSok1ijiKrkRzK+AxQPSpNMUnC03roWBBwJx/9I8V7lQoxmA==", + "version": "18.18.13", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.18.13.tgz", + "integrity": "sha512-vXYZGRrSCreZmq1rEjMRLXJhiy8MrIeVasx+PCVlP414N7CJLHnMf+juVvjdprHyH+XRy3zKZLHeNueOpJCn0g==", "dev": true, "dependencies": { "undici-types": "~5.26.4" @@ -7202,9 +7202,9 @@ "dev": true }, "node_modules/@types/node": { - "version": "20.9.3", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.9.3.tgz", - "integrity": 
"sha512-nk5wXLAXGBKfrhLB0cyHGbSqopS+nz0BUgZkUQqSHSSgdee0kssp1IAqlQOu333bW+gMNs2QREx7iynm19Abxw==", + "version": "20.10.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.10.0.tgz", + "integrity": "sha512-D0WfRmU9TQ8I9PFx9Yc+EBHw+vSpIub4IDvQivcp26PtPrdMGAq5SDcpXEo/epqa/DXotVpekHiLNTg3iaKXBQ==", "dependencies": { "undici-types": "~5.26.4" } @@ -7264,9 +7264,9 @@ } }, "node_modules/@types/react-dom": { - "version": "18.2.16", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.16.tgz", - "integrity": "sha512-766c37araZ9vxtYs25gvY2wNdFWsT2ZiUvOd0zMhTaoGj6B911N8CKQWgXXJoPMLF3J82thpRqQA7Rf3rBwyJw==", + "version": "18.2.17", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.17.tgz", + "integrity": "sha512-rvrT/M7Df5eykWFxn6MYt5Pem/Dbyc1N8Y0S9Mrkw2WFCRiqUgw9P7ul2NpwsXCSM1DVdENzdG9J5SreqfAIWg==", "dependencies": { "@types/react": "*" } @@ -7280,9 +7280,9 @@ } }, "node_modules/@types/scheduler": { - "version": "0.16.7", - "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.7.tgz", - "integrity": "sha512-8g25Nl3AuB1KulTlSUsUhUo/oBgBU6XIXQ+XURpeioEbEJvkO7qI4vDfREv3vJYHHzqXjcAHvoJy4pTtSQNZtA==" + "version": "0.16.8", + "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.8.tgz", + "integrity": "sha512-WZLiwShhwLRmeV6zH+GkbOFT6Z6VklCItrDioxUnv+u4Ll+8vKeFySoFyK/0ctcRpOmwAicELfmys1sDc/Rw+A==" }, "node_modules/@types/semver": { "version": "7.5.6", @@ -7312,9 +7312,9 @@ } }, "node_modules/@types/stylis": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/@types/stylis/-/stylis-4.2.3.tgz", - "integrity": "sha512-86XLCVEmWagiUEbr2AjSbeY4qHN9jMm3pgM3PuBYfLIbT0MpDSnA3GA/4W7KoH/C/eeK77kNaeIxZzjhKYIBgw==" + "version": "4.2.4", + "resolved": "https://registry.npmjs.org/@types/stylis/-/stylis-4.2.4.tgz", + "integrity": "sha512-36ZrGJ8fgtBr6nwNnuJ9jXIj+bn/pF6UoqmrQT7+Y99+tFFeHHsoR54+194dHdyhPjgbeoNz3Qru0oRt0l6ASQ==" }, "node_modules/@types/unist": { "version": "2.0.10", @@ -8765,9 +8765,9 @@ } }, "node_modules/big-integer": { - "version": "1.6.51", - "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.51.tgz", - "integrity": "sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg==", + "version": "1.6.52", + "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.52.tgz", + "integrity": "sha512-QxD8cf2eVqJOOz63z6JIN9BzvVs/dlySa5HGSBH5xtR8dPteIRQnBxxKqkNTiT6jbDTF6jAfrd4oMcND9RGbQg==", "dev": true, "engines": { "node": ">=0.6" @@ -9320,9 +9320,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001563", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001563.tgz", - "integrity": "sha512-na2WUmOxnwIZtwnFI2CZ/3er0wdNzU7hN+cPYz/z2ajHThnkWjNBOpEPP4n+4r2WPM847JaMotaJE3bnfzjyKw==", + "version": "1.0.30001565", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001565.tgz", + "integrity": "sha512-xrE//a3O7TP0vaJ8ikzkD2c2NgcVUvsEe2IvFTntV4Yd1Z9FVzh+gW+enX96L0psrbaFMcVcH2l90xNuGDWc8w==", "funding": [ { "type": "opencollective", @@ -9491,9 +9491,9 @@ } }, "node_modules/cli-spinners": { - "version": "2.9.1", - "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.1.tgz", - "integrity": "sha512-jHgecW0pxkonBJdrKsqxgRX9AcG+u/5k0Q7WPDfi8AogLAdwxEkyYYNWwZ5GvVFoFx2uiY1eNcSK00fh+1+FyQ==", + "version": "2.9.2", + "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz", + "integrity": 
"sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==", "dev": true, "engines": { "node": ">=6" @@ -10858,9 +10858,9 @@ } }, "node_modules/electron-to-chromium": { - "version": "1.4.589", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.589.tgz", - "integrity": "sha512-zF6y5v/YfoFIgwf2dDfAqVlPPsyQeWNpEWXbAlDUS8Ax4Z2VoiiZpAPC0Jm9hXEkJm2vIZpwB6rc4KnLTQffbQ==", + "version": "1.4.594", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.594.tgz", + "integrity": "sha512-xT1HVAu5xFn7bDfkjGQi9dNpMqGchUkebwf1GL7cZN32NSwwlHRPMSDJ1KN6HkS0bWUtndbSQZqvpQftKG2uFQ==", "dev": true }, "node_modules/elliptic": { @@ -14734,9 +14734,9 @@ } }, "node_modules/lucide-react": { - "version": "0.292.0", - "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.292.0.tgz", - "integrity": "sha512-rRgUkpEHWpa5VCT66YscInCQmQuPCB1RFRzkkxMxg4b+jaL0V12E3riWWR2Sh5OIiUhCwGW/ZExuEO4Az32E6Q==", + "version": "0.293.0", + "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.293.0.tgz", + "integrity": "sha512-g3AN0EYITCpAjNgLHrKrFWvIJzZy0Y9OPBaonyKw1cM+nZE6piOM+TiuQdYfha7oa76TMiDaWXQHE44CEqsrzw==", "peerDependencies": { "react": "^16.5.1 || ^17.0.0 || ^18.0.0" } @@ -15855,9 +15855,9 @@ } }, "node_modules/path-scurry/node_modules/lru-cache": { - "version": "10.0.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.3.tgz", - "integrity": "sha512-B7gr+F6MkqB3uzINHXNctGieGsRTMwIBgxkp0yq/5BwcuDzD4A8wQpHQW6vDAm1uKSLQghmRdD9sKqf2vJ1cEg==", + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.1.0.tgz", + "integrity": "sha512-/1clY/ui8CzjKFyjdvwPWJUYKiFVXG2I2cY0ssG7h4+hwk+XOIX7ZSG9Q7TW8TW3Kp3BUSqgFWBLgL4PJ+Blag==", "dev": true, "engines": { "node": "14 || >=16.14" @@ -19129,9 +19129,9 @@ } }, "node_modules/tocbot": { - "version": "4.22.0", - "resolved": "https://registry.npmjs.org/tocbot/-/tocbot-4.22.0.tgz", - "integrity": "sha512-YHCs00HCNiHxUhksloa36fTfMEXEWV+vdPn3ARQfmj2u3PcUYIjJkfc+ABUfCF9Eb+LSy/QzuLl256fbsRnpHQ==", + "version": "4.23.0", + "resolved": "https://registry.npmjs.org/tocbot/-/tocbot-4.23.0.tgz", + "integrity": "sha512-5DWuSZXsqG894mkGb8ZsQt9myyQyVxE50AiGRZ0obV0BVUTVkaZmc9jbgpknaAAPUm4FIrzGkEseD6FuQJYJDQ==", "dev": true }, "node_modules/toidentifier": { @@ -19558,9 +19558,9 @@ } }, "node_modules/unplugin/node_modules/webpack-virtual-modules": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/webpack-virtual-modules/-/webpack-virtual-modules-0.6.0.tgz", - "integrity": "sha512-KnaMTE6EItz/f2q4Gwg5/rmeKVi79OR58NoYnwDJqCk9ywMtTGbBnBcfoBtN4QbYu0lWXvyMoH2Owxuhe4qI6Q==", + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/webpack-virtual-modules/-/webpack-virtual-modules-0.6.1.tgz", + "integrity": "sha512-poXpCylU7ExuvZK8z+On3kX+S8o/2dQ/SVYueKA0D4WEMXROXgY8Ez50/bQEUmvoSMMrWcrJqCHuhAbsiwg7Dg==", "dev": true }, "node_modules/untildify": { @@ -19749,9 +19749,9 @@ } }, "node_modules/v8-to-istanbul": { - "version": "9.1.3", - "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.1.3.tgz", - "integrity": "sha512-9lDD+EVI2fjFsMWXc6dy5JJzBsVTcQ2fVkfBvncZ6xJWG9wtBhOldG+mHkSL0+V1K/xgZz0JDO5UT5hFwHUghg==", + "version": "9.2.0", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.2.0.tgz", + "integrity": "sha512-/EH/sDgxU2eGxajKdwLCDmQ4FWq+kpi3uCmBGpw1xJtnAxEjlD8j8PEiGWpCIMIs3ciNAgH0d3TTJiUkYzyZjA==", "dev": true, "dependencies": { "@jridgewell/trace-mapping": 
"^0.3.12", diff --git a/ui/package.json b/ui/package.json index 863998d86b..3f9a6d01d2 100644 --- a/ui/package.json +++ b/ui/package.json @@ -32,13 +32,13 @@ "@radix-ui/react-toggle-group": "^1.0.4", "@radix-ui/react-tooltip": "^1.0.7", "@tremor/react": "^3.11.1", - "@types/node": "^20.9.3", + "@types/node": "^20.10.0", "@types/react": "^18.2.38", - "@types/react-dom": "^18.2.16", + "@types/react-dom": "^18.2.17", "classnames": "^2.3.2", "clsx": "^2.0.0", "long": "^5.2.3", - "lucide-react": "^0.292.0", + "lucide-react": "^0.293.0", "material-symbols": "^0.14.1", "moment": "^2.29.4", "moment-timezone": "^0.5.43", From 40117c554525ec52a502abc3514785f618e49af9 Mon Sep 17 00:00:00 2001 From: Amogh Bharadwaj Date: Fri, 1 Dec 2023 22:48:29 +0530 Subject: [PATCH 12/12] Remove connection string display (#744) Removes the connection string display in postgres peer page --- ui/app/peers/[peerName]/page.tsx | 28 ---------------------------- 1 file changed, 28 deletions(-) diff --git a/ui/app/peers/[peerName]/page.tsx b/ui/app/peers/[peerName]/page.tsx index 4e88c5c3ee..78bd430bf3 100644 --- a/ui/app/peers/[peerName]/page.tsx +++ b/ui/app/peers/[peerName]/page.tsx @@ -1,36 +1,16 @@ -import { CopyButton } from '@/components/CopyButton'; import ReloadButton from '@/components/ReloadButton'; -import { PostgresConfig } from '@/grpc_generated/peers'; import { PeerSlotResponse, PeerStatResponse } from '@/grpc_generated/route'; import { Label } from '@/lib/Label'; import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; import Link from 'next/link'; -import prisma from '../../utils/prisma'; import SlotTable from './slottable'; import StatTable from './stattable'; -import { connStringStyle } from './style'; export const dynamic = 'force-dynamic'; type DataConfigProps = { params: { peerName: string }; }; -async function fetchConnectionString(peerName: string) { - const config = await prisma.peers.findUnique({ - select: { - options: true, - }, - where: { - name: peerName, - }, - }); - if (config) { - const pgConfig = PostgresConfig.decode(config.options); - return `postgresql://${pgConfig.user}:${pgConfig.password}@${pgConfig.host}:${pgConfig.port}`; - } - return ''; -} - const PeerData = async ({ params: { peerName } }: DataConfigProps) => { const getSlotData = async () => { const flowServiceAddr = GetFlowHttpAddressFromEnv(); @@ -71,7 +51,6 @@ const PeerData = async ({ params: { peerName } }: DataConfigProps) => { const slots = await getSlotData(); const stats = await getStatData(); - const connectionString = await fetchConnectionString(peerName); return (
@@ -79,13 +58,6 @@ const PeerData = async ({ params: { peerName } }: DataConfigProps) => {
{peerName}
-
- -
- {connectionString} - -
-
{slots && stats ? (