Merge branch 'main' into cdc-parallel-sync-normalize
serprex authored Dec 12, 2023
2 parents db144de + f29deb4 commit d66c98e
Showing 4 changed files with 21 additions and 20 deletions.
31 changes: 15 additions & 16 deletions flow/cmd/mirror_status.go
@@ -64,26 +64,25 @@ func (h *FlowRequestHandler) CDCFlowStatus(
}

var initialCopyStatus *protos.SnapshotStatus
- if config.DoInitialCopy {
- cloneJobNames, err := h.getCloneTableFlowNames(ctx, req.FlowJobName)

+ cloneJobNames, err := h.getCloneTableFlowNames(ctx, req.FlowJobName)
if err != nil {
return nil, err
}

- cloneStatuses := []*protos.QRepMirrorStatus{}
- for _, cloneJobName := range cloneJobNames {
- cloneStatus, err := h.QRepFlowStatus(ctx, &protos.MirrorStatusRequest{
- FlowJobName: cloneJobName,
- })
- if err != nil {
- return nil, err
- }
- cloneStatuses = append(cloneStatuses, cloneStatus)
- }

+ cloneStatuses := []*protos.QRepMirrorStatus{}
+ for _, cloneJobName := range cloneJobNames {
+ cloneStatus, err := h.QRepFlowStatus(ctx, &protos.MirrorStatusRequest{
+ FlowJobName: cloneJobName,
+ })
+ if err != nil {
+ return nil, err
+ }
+ cloneStatuses = append(cloneStatuses, cloneStatus)
+ }

- initialCopyStatus = &protos.SnapshotStatus{
- Clones: cloneStatuses,
- }
+ initialCopyStatus = &protos.SnapshotStatus{
+ Clones: cloneStatuses,
+ }

return &protos.CDCMirrorStatus{
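
With the DoInitialCopy guard gone, CDCFlowStatus always assembles a snapshot status from whatever clone jobs it finds. A minimal Go sketch of that control flow, assuming an empty clone-job list simply yields an empty status; the types below are simplified stand-ins, not PeerDB's protos:

package main

import "fmt"

// Simplified stand-ins for protos.QRepMirrorStatus / protos.SnapshotStatus.
type qrepMirrorStatus struct{ flowJobName string }
type snapshotStatus struct{ clones []*qrepMirrorStatus }

// buildSnapshotStatus mirrors the reshaped handler logic: it no longer checks
// DoInitialCopy and instead builds a (possibly empty) status from whatever
// clone jobs exist.
func buildSnapshotStatus(cloneJobNames []string) *snapshotStatus {
	clones := make([]*qrepMirrorStatus, 0, len(cloneJobNames))
	for _, name := range cloneJobNames {
		clones = append(clones, &qrepMirrorStatus{flowJobName: name})
	}
	return &snapshotStatus{clones: clones}
}

func main() {
	fmt.Println(len(buildSnapshotStatus(nil).clones))                            // 0 clones, no guard needed
	fmt.Println(len(buildSnapshotStatus([]string{"clone_a", "clone_b"}).clones)) // 2
}
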
3 changes: 2 additions & 1 deletion flow/connectors/bigquery/bigquery.go
@@ -237,7 +237,8 @@ func (c *BigQueryConnector) ReplayTableSchemaDeltas(flowJobName string,
}

for _, addedColumn := range schemaDelta.AddedColumns {
- _, err := c.client.Query(fmt.Sprintf("ALTER TABLE %s.%s ADD COLUMN `%s` %s", c.datasetID,
+ _, err := c.client.Query(fmt.Sprintf(
+ "ALTER TABLE %s.%s ADD COLUMN IF NOT EXISTS `%s` %s", c.datasetID,
schemaDelta.DstTableName, addedColumn.ColumnName,
qValueKindToBigQueryType(addedColumn.ColumnType))).Read(c.ctx)
if err != nil {
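
The functional change here is the added IF NOT EXISTS, which makes replaying a schema delta idempotent: a retried ALTER TABLE no longer fails when the column already exists. A minimal sketch of the same idea against the cloud.google.com/go/bigquery client; the project, dataset, table, and column names are placeholders:

package main

import (
	"context"
	"fmt"
	"log"

	"cloud.google.com/go/bigquery"
)

// addColumnIfNotExists issues an idempotent ADD COLUMN, so running it twice
// for the same column is harmless.
func addColumnIfNotExists(ctx context.Context, client *bigquery.Client,
	dataset, table, column, bqType string) error {
	query := client.Query(fmt.Sprintf(
		"ALTER TABLE %s.%s ADD COLUMN IF NOT EXISTS `%s` %s",
		dataset, table, column, bqType))
	_, err := query.Read(ctx) // runs the DDL and waits for completion
	return err
}

func main() {
	ctx := context.Background()
	client, err := bigquery.NewClient(ctx, "my-project") // placeholder project ID
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()

	// Safe to run repeatedly because of IF NOT EXISTS.
	if err := addColumnIfNotExists(ctx, client, "my_dataset", "my_table", "new_col", "STRING"); err != nil {
		log.Fatal(err)
	}
}
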
3 changes: 2 additions & 1 deletion flow/connectors/postgres/postgres.go
@@ -710,7 +710,8 @@ func (c *PostgresConnector) ReplayTableSchemaDeltas(flowJobName string,
}

for _, addedColumn := range schemaDelta.AddedColumns {
- _, err = tableSchemaModifyTx.Exec(c.ctx, fmt.Sprintf("ALTER TABLE %s ADD COLUMN \"%s\" %s",
+ _, err = tableSchemaModifyTx.Exec(c.ctx, fmt.Sprintf(
+ "ALTER TABLE %s ADD COLUMN IF NOT EXISTS \"%s\" %s",
schemaDelta.DstTableName, addedColumn.ColumnName,
qValueKindToPostgresType(addedColumn.ColumnType)))
if err != nil {
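
Same idempotency fix on the Postgres side (ADD COLUMN IF NOT EXISTS is available since PostgreSQL 9.6). A minimal standalone sketch using pgx; the connection string and identifiers are placeholders, and the real connector issues this inside its own schema-modify transaction:

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/jackc/pgx/v5"
)

func main() {
	ctx := context.Background()
	conn, err := pgx.Connect(ctx, "postgres://user:pass@localhost:5432/db") // placeholder DSN
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close(ctx)

	tx, err := conn.Begin(ctx)
	if err != nil {
		log.Fatal(err)
	}
	defer tx.Rollback(ctx) // no-op after a successful Commit

	// Idempotent: a second replay of the same delta is a no-op, not an error.
	_, err = tx.Exec(ctx, fmt.Sprintf(
		"ALTER TABLE %s ADD COLUMN IF NOT EXISTS \"%s\" %s",
		"public.my_table", "new_col", "TEXT"))
	if err != nil {
		log.Fatal(err)
	}
	if err := tx.Commit(ctx); err != nil {
		log.Fatal(err)
	}
}
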
4 changes: 2 additions & 2 deletions flow/connectors/utils/cdc_records/cdc_records_storage.go
@@ -18,9 +18,9 @@ import (

const (
/** begin with in-memory store, and then switch to Pebble DB
- when the number of stored records crosses 1M
+ when the number of stored records crosses 100k
**/
- defaultNumRecordsSwitchThreshold = 1_000_000
+ defaultNumRecordsSwitchThreshold = 1_00_000
)

func encVal(val any) ([]byte, error) {
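
The constant lowers the spill point from 1M to 100k records (1_00_000 is Go digit grouping for 100000). A minimal sketch of the buffer-in-memory-then-spill-to-Pebble pattern the comment describes, not PeerDB's actual store; the names, threshold handling, and migration strategy are illustrative:

package main

import (
	"log"

	"github.com/cockroachdb/pebble"
)

const switchThreshold = 100_000 // assumed: switch to disk after 100k records

type spillingStore struct {
	mem map[string][]byte
	db  *pebble.DB
	dir string
}

func newSpillingStore(dir string) *spillingStore {
	return &spillingStore{mem: make(map[string][]byte), dir: dir}
}

func (s *spillingStore) Set(key string, val []byte) error {
	// Stay in memory below the threshold.
	if s.db == nil && len(s.mem) < switchThreshold {
		s.mem[key] = val
		return nil
	}
	// First write past the threshold: open Pebble and migrate what we have.
	if s.db == nil {
		db, err := pebble.Open(s.dir, &pebble.Options{})
		if err != nil {
			return err
		}
		s.db = db
		for k, v := range s.mem {
			if err := db.Set([]byte(k), v, pebble.NoSync); err != nil {
				return err
			}
		}
		s.mem = nil
	}
	return s.db.Set([]byte(key), val, pebble.NoSync)
}

func main() {
	store := newSpillingStore("/tmp/cdc-records-example")
	if err := store.Set("lsn/1", []byte("record")); err != nil {
		log.Fatal(err)
	}
}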
