From 4a252fb6919e0355ee63b8d08186c12f644a1eb2 Mon Sep 17 00:00:00 2001 From: Amogh-Bharadwaj Date: Mon, 13 May 2024 11:37:50 +0530 Subject: [PATCH 01/31] boilerplate --- flow/cmd/custom_sync.go | 102 ++++++++++++++++++++++++ flow/cmd/handler.go | 10 ++- flow/cmd/mirror_status.go | 6 +- flow/cmd/validate_mirror.go | 2 +- flow/connectors/core.go | 3 + flow/e2e/postgres/peer_flow_pg_test.go | 8 +- flow/e2e/postgres/qrep_flow_pg_test.go | 8 +- flow/model/signals.go | 20 +++-- flow/workflows/cdc_flow.go | 30 +++++-- flow/workflows/qrep_flow.go | 10 +-- flow/workflows/xmin_flow.go | 4 +- nexus/flow-rs/src/grpc.rs | 3 +- protos/route.proto | 23 +++++- ui/app/mirrors/[mirrorId]/edit/page.tsx | 1 + ui/app/mirrors/[mirrorId]/handlers.ts | 1 + 15 files changed, 200 insertions(+), 31 deletions(-) create mode 100644 flow/cmd/custom_sync.go diff --git a/flow/cmd/custom_sync.go b/flow/cmd/custom_sync.go new file mode 100644 index 0000000000..25c16fb910 --- /dev/null +++ b/flow/cmd/custom_sync.go @@ -0,0 +1,102 @@ +package cmd + +import ( + "context" + "fmt" + + "github.com/PeerDB-io/peer-flow/generated/protos" + peerflow "github.com/PeerDB-io/peer-flow/workflows" +) + +const peerdbPauseGuideDocLink = "https://docs.peerdb.io/features/pause-mirror" + +func (h *FlowRequestHandler) CustomSyncFlow( + ctx context.Context, req *protos.CreateCustomFlowRequest, +) (*protos.CreateCustomFlowResponse, error) { + // ---- REQUEST VALIDATION ---- + if req.FlowJobName == "" { + return &protos.CreateCustomFlowResponse{ + FlowJobName: req.FlowJobName, + NumberOfSyncs: 0, + ErrorMessage: "Flow job name is not provided", + Ok: false, + }, nil + } + + if req.NumberOfSyncs <= 0 || req.NumberOfSyncs > peerflow.MaxSyncsPerCdcFlow { + return &protos.CreateCustomFlowResponse{ + FlowJobName: req.FlowJobName, + NumberOfSyncs: 0, + ErrorMessage: fmt.Sprintf("Sync number request must be between 1 and %d (inclusive). Requested number: %d", + peerflow.MaxSyncsPerCdcFlow, req.NumberOfSyncs), + Ok: false, + }, nil + } + + mirrorExists, err := h.CheckIfMirrorNameExists(ctx, req.FlowJobName) + if err != nil { + return &protos.CreateCustomFlowResponse{ + FlowJobName: req.FlowJobName, + NumberOfSyncs: 0, + ErrorMessage: "Server error: unable to check if mirror " + req.FlowJobName + " exists.", + Ok: false, + }, nil + } + if !mirrorExists { + return &protos.CreateCustomFlowResponse{ + FlowJobName: req.FlowJobName, + NumberOfSyncs: 0, + ErrorMessage: req.FlowJobName + "does not exist. This may be because it was dropped.", + Ok: false, + }, nil + } + + mirrorStatusResponse, _ := h.MirrorStatus(ctx, &protos.MirrorStatusRequest{ + FlowJobName: req.FlowJobName, + }) + if mirrorStatusResponse.ErrorMessage != "" { + return &protos.CreateCustomFlowResponse{ + FlowJobName: req.FlowJobName, + NumberOfSyncs: 0, + ErrorMessage: fmt.Sprintf("Server error: unable to check the status of mirror %s: %s", + req.FlowJobName, mirrorStatusResponse.ErrorMessage), + Ok: false, + }, nil + } + + if mirrorStatusResponse.CurrentFlowState != protos.FlowStatus_STATUS_PAUSED { + return &protos.CreateCustomFlowResponse{ + FlowJobName: req.FlowJobName, + NumberOfSyncs: 0, + ErrorMessage: fmt.Sprintf(`Requested mirror %s is not paused. This is a requirement. + The mirror can be paused via PeerDB UI. 
Please follow %s`, + req.FlowJobName, peerdbPauseGuideDocLink), + Ok: false, + }, nil + } + // ---- REQUEST VALIDATED ---- + + // Resume mirror with custom sync number + _, err = h.FlowStateChange(ctx, &protos.FlowStateChangeRequest{ + FlowJobName: req.FlowJobName, + RequestedFlowState: protos.FlowStatus_STATUS_RUNNING, + FlowConfigUpdate: nil, + CustomNumberOfSyncs: req.NumberOfSyncs, + }) + if err != nil { + return &protos.CreateCustomFlowResponse{ + FlowJobName: req.FlowJobName, + NumberOfSyncs: 0, + ErrorMessage: fmt.Sprintf("Unable to kick off sync for mirror %s:%s", + req.FlowJobName, err.Error()), + Ok: false, + }, nil + } + + return &protos.CreateCustomFlowResponse{ + FlowJobName: req.FlowJobName, + NumberOfSyncs: req.NumberOfSyncs, + ErrorMessage: "", + Ok: true, + }, nil +} diff --git a/flow/cmd/handler.go b/flow/cmd/handler.go index f338b2185c..b349000e08 100644 --- a/flow/cmd/handler.go +++ b/flow/cmd/handler.go @@ -430,16 +430,22 @@ func (h *FlowRequestHandler) FlowStateChange( h.temporalClient, workflowID, "", - model.PauseSignal, + model.CDCFlowSignalProperties{ + Signal: model.PauseSignal, + }, ) } else if req.RequestedFlowState == protos.FlowStatus_STATUS_RUNNING && currState == protos.FlowStatus_STATUS_PAUSED { + slog.Info("Resume handler", slog.Int("customNumberOfSyncs", int(req.CustomNumberOfSyncs))) err = model.FlowSignal.SignalClientWorkflow( ctx, h.temporalClient, workflowID, "", - model.NoopSignal, + model.CDCFlowSignalProperties{ + Signal: model.NoopSignal, + CustomNumberOfSyncs: int(req.CustomNumberOfSyncs), + }, ) } else if req.RequestedFlowState == protos.FlowStatus_STATUS_TERMINATED && (currState != protos.FlowStatus_STATUS_TERMINATED) { diff --git a/flow/cmd/mirror_status.go b/flow/cmd/mirror_status.go index 06f64d34a9..9e72ce6466 100644 --- a/flow/cmd/mirror_status.go +++ b/flow/cmd/mirror_status.go @@ -30,13 +30,15 @@ func (h *FlowRequestHandler) MirrorStatus( workflowID, err := h.getWorkflowID(ctx, req.FlowJobName) if err != nil { - return nil, err + return &protos.MirrorStatusResponse{ + ErrorMessage: "unable to get workflow ID " + err.Error(), + }, nil } currState, err := h.getWorkflowStatus(ctx, workflowID) if err != nil { return &protos.MirrorStatusResponse{ - ErrorMessage: "unable to get flow state: " + err.Error(), + ErrorMessage: "unable to get workflow status " + err.Error(), }, nil } diff --git a/flow/cmd/validate_mirror.go b/flow/cmd/validate_mirror.go index b4304b0504..f67a4f1d40 100644 --- a/flow/cmd/validate_mirror.go +++ b/flow/cmd/validate_mirror.go @@ -127,7 +127,7 @@ func (h *FlowRequestHandler) CheckIfMirrorNameExists(ctx context.Context, mirror var nameExists pgtype.Bool err := h.pool.QueryRow(ctx, "SELECT EXISTS(SELECT * FROM flows WHERE name = $1)", mirrorName).Scan(&nameExists) if err != nil { - return true, fmt.Errorf("failed to check if mirror name exists: %v", err) + return false, fmt.Errorf("failed to check if mirror name exists: %v", err) } return nameExists.Bool, nil diff --git a/flow/connectors/core.go b/flow/connectors/core.go index 0f25c4509c..613aaa3a39 100644 --- a/flow/connectors/core.go +++ b/flow/connectors/core.go @@ -248,6 +248,9 @@ type RenameTablesConnector interface { } func GetConnector(ctx context.Context, config *protos.Peer) (Connector, error) { + if config == nil { + return nil, errors.ErrUnsupported + } switch inner := config.Config.(type) { case *protos.Peer_PostgresConfig: return connpostgres.NewPostgresConnector(ctx, inner.PostgresConfig) diff --git a/flow/e2e/postgres/peer_flow_pg_test.go 
b/flow/e2e/postgres/peer_flow_pg_test.go index 68d6d950ee..74dd118255 100644 --- a/flow/e2e/postgres/peer_flow_pg_test.go +++ b/flow/e2e/postgres/peer_flow_pg_test.go @@ -991,7 +991,9 @@ func (s PeerFlowE2ETestSuitePG) Test_Dynamic_Mirror_Config_Via_Signals() { if !s.t.Failed() { addRows(1) - e2e.SignalWorkflow(env, model.FlowSignal, model.PauseSignal) + e2e.SignalWorkflow(env, model.FlowSignal, model.CDCFlowSignalProperties{ + Signal: model.PauseSignal, + }) addRows(1) e2e.EnvWaitFor(s.t, env, 1*time.Minute, "paused workflow", func() bool { // keep adding 1 more row - finishing another sync @@ -1015,7 +1017,9 @@ func (s PeerFlowE2ETestSuitePG) Test_Dynamic_Mirror_Config_Via_Signals() { // add rows to both tables before resuming - should handle addRows(18) - e2e.SignalWorkflow(env, model.FlowSignal, model.NoopSignal) + e2e.SignalWorkflow(env, model.FlowSignal, model.CDCFlowSignalProperties{ + Signal: model.NoopSignal, + }) e2e.EnvWaitFor(s.t, env, 1*time.Minute, "resumed workflow", func() bool { return getFlowStatus() == protos.FlowStatus_STATUS_RUNNING diff --git a/flow/e2e/postgres/qrep_flow_pg_test.go b/flow/e2e/postgres/qrep_flow_pg_test.go index eb67b43592..a36b93be2a 100644 --- a/flow/e2e/postgres/qrep_flow_pg_test.go +++ b/flow/e2e/postgres/qrep_flow_pg_test.go @@ -416,7 +416,9 @@ func (s PeerFlowE2ETestSuitePG) Test_Pause() { tc := e2e.NewTemporalClient(s.t) env := e2e.RunQRepFlowWorkflow(tc, config) - e2e.SignalWorkflow(env, model.FlowSignal, model.PauseSignal) + e2e.SignalWorkflow(env, model.FlowSignal, model.CDCFlowSignalProperties{ + Signal: model.PauseSignal, + }) e2e.EnvWaitFor(s.t, env, 3*time.Minute, "pausing", func() bool { response, err := env.Query(shared.QRepFlowStateQuery) @@ -431,7 +433,9 @@ func (s PeerFlowE2ETestSuitePG) Test_Pause() { } return state.CurrentFlowStatus == protos.FlowStatus_STATUS_PAUSED }) - e2e.SignalWorkflow(env, model.FlowSignal, model.NoopSignal) + e2e.SignalWorkflow(env, model.FlowSignal, model.CDCFlowSignalProperties{ + Signal: model.NoopSignal, + }) e2e.EnvWaitFor(s.t, env, time.Minute, "unpausing", func() bool { response, err := env.Query(shared.QRepFlowStateQuery) if err != nil { diff --git a/flow/model/signals.go b/flow/model/signals.go index 53fcf7e1e5..e74285f01a 100644 --- a/flow/model/signals.go +++ b/flow/model/signals.go @@ -100,6 +100,10 @@ func (self TypedReceiveChannel[T]) AddToSelector(selector workflow.Selector, f f } type CDCFlowSignal int64 +type CDCFlowSignalProperties struct { + Signal CDCFlowSignal + CustomNumberOfSyncs int +} const ( NoopSignal CDCFlowSignal = iota @@ -109,25 +113,31 @@ const ( func FlowSignalHandler(activeSignal CDCFlowSignal, v CDCFlowSignal, logger log.Logger, -) CDCFlowSignal { +) CDCFlowSignalProperties { switch v { case PauseSignal: logger.Info("received pause signal") if activeSignal == NoopSignal { logger.Info("workflow was running, pausing it") - return v + return CDCFlowSignalProperties{ + Signal: v, + } } case NoopSignal: logger.Info("received resume signal") if activeSignal == PauseSignal { logger.Info("workflow was paused, resuming it") - return v + return CDCFlowSignalProperties{ + Signal: v, + } } } - return activeSignal + return CDCFlowSignalProperties{ + Signal: activeSignal, + } } -var FlowSignal = TypedSignal[CDCFlowSignal]{ +var FlowSignal = TypedSignal[CDCFlowSignalProperties]{ Name: "peer-flow-signal", } diff --git a/flow/workflows/cdc_flow.go b/flow/workflows/cdc_flow.go index 51ccaf7f5f..4d5ce5110e 100644 --- a/flow/workflows/cdc_flow.go +++ b/flow/workflows/cdc_flow.go @@ -82,7 
+82,7 @@ func GetChildWorkflowID( type CDCFlowWorkflowResult = CDCFlowWorkflowState const ( - maxSyncsPerCdcFlow = 32 + MaxSyncsPerCdcFlow = 32 ) func processCDCFlowConfigUpdate( @@ -225,7 +225,6 @@ func CDCFlowWorkflow( logger := log.With(workflow.GetLogger(ctx), slog.String(string(shared.FlowNameKey), cfg.FlowJobName)) flowSignalChan := model.FlowSignal.GetSignalChannel(ctx) - err := workflow.SetQueryHandler(ctx, shared.CDCFlowStateQuery, func() (CDCFlowWorkflowState, error) { return *state, nil }) @@ -250,11 +249,20 @@ func CDCFlowWorkflow( shared.MirrorNameSearchAttribute: cfg.FlowJobName, } + var syncCountLimit int if state.ActiveSignal == model.PauseSignal { selector := workflow.NewNamedSelector(ctx, "PauseLoop") selector.AddReceive(ctx.Done(), func(_ workflow.ReceiveChannel, _ bool) {}) - flowSignalChan.AddToSelector(selector, func(val model.CDCFlowSignal, _ bool) { - state.ActiveSignal = model.FlowSignalHandler(state.ActiveSignal, val, logger) + flowSignalChan.AddToSelector(selector, func(val model.CDCFlowSignalProperties, _ bool) { + cdcFlowData := model.FlowSignalHandler(state.ActiveSignal, val.Signal, logger) + slog.Info("value of signal", slog.Any("signal", cdcFlowData.Signal)) + slog.Info("cdc signal val", slog.Any("val", val)) + state.ActiveSignal = cdcFlowData.Signal + syncCountLimit = val.CustomNumberOfSyncs + if syncCountLimit <= 0 { + syncCountLimit = MaxSyncsPerCdcFlow + } + slog.Info("sync limit reception inside pause", slog.Int("limit", syncCountLimit)) }) addCdcPropertiesSignalListener(ctx, logger, selector, state) @@ -411,7 +419,6 @@ func CDCFlowWorkflow( var restart, finished bool syncCount := 0 - syncFlowOpts := workflow.ChildWorkflowOptions{ WorkflowID: syncFlowID, ParentClosePolicy: enums.PARENT_CLOSE_POLICY_REQUEST_CANCEL, @@ -480,13 +487,20 @@ func CDCFlowWorkflow( } logger.Info("normalize finished, finishing") + if syncCount == int(syncCountLimit) { + logger.Info("sync count limit reached, pausing", + slog.Int("limit", syncCountLimit), + slog.Int("count", syncCount)) + state.ActiveSignal = model.PauseSignal + } normFlowFuture = nil restart = true finished = true }) - flowSignalChan.AddToSelector(mainLoopSelector, func(val model.CDCFlowSignal, _ bool) { - state.ActiveSignal = model.FlowSignalHandler(state.ActiveSignal, val, logger) + flowSignalChan.AddToSelector(mainLoopSelector, func(val model.CDCFlowSignalProperties, _ bool) { + cdcFlowData := model.FlowSignalHandler(state.ActiveSignal, val.Signal, logger) + state.ActiveSignal = cdcFlowData.Signal }) syncResultChan := model.SyncResultSignal.GetSignalChannel(ctx) @@ -528,7 +542,7 @@ func CDCFlowWorkflow( return state, err } - if state.ActiveSignal == model.PauseSignal || syncCount >= maxSyncsPerCdcFlow { + if state.ActiveSignal == model.PauseSignal || syncCount >= MaxSyncsPerCdcFlow { restart = true if syncFlowFuture != nil { err := model.SyncStopSignal.SignalChildWorkflow(ctx, syncFlowFuture, struct{}{}).Get(ctx, nil) diff --git a/flow/workflows/qrep_flow.go b/flow/workflows/qrep_flow.go index 8be2619866..5202d7bd04 100644 --- a/flow/workflows/qrep_flow.go +++ b/flow/workflows/qrep_flow.go @@ -322,7 +322,7 @@ func (q *QRepFlowExecution) consolidatePartitions(ctx workflow.Context) error { func (q *QRepFlowExecution) waitForNewRows( ctx workflow.Context, - signalChan model.TypedReceiveChannel[model.CDCFlowSignal], + signalChan model.TypedReceiveChannel[model.CDCFlowSignalProperties], lastPartition *protos.QRepPartition, ) error { ctx = workflow.WithChildOptions(ctx, workflow.ChildWorkflowOptions{ @@ -336,8 
+336,8 @@ func (q *QRepFlowExecution) waitForNewRows( var newRows bool var waitErr error waitSelector := workflow.NewNamedSelector(ctx, "WaitForRows") - signalChan.AddToSelector(waitSelector, func(val model.CDCFlowSignal, _ bool) { - q.activeSignal = model.FlowSignalHandler(q.activeSignal, val, q.logger) + signalChan.AddToSelector(waitSelector, func(val model.CDCFlowSignalProperties, _ bool) { + q.activeSignal = model.FlowSignalHandler(q.activeSignal, val.Signal, q.logger).Signal }) waitSelector.AddFuture(future, func(f workflow.Future) { newRows = true @@ -541,7 +541,7 @@ func QRepFlowWorkflow( // only place we block on receive, so signal processing is immediate val, ok, _ := signalChan.ReceiveWithTimeout(ctx, 1*time.Minute) if ok { - q.activeSignal = model.FlowSignalHandler(q.activeSignal, val, q.logger) + q.activeSignal = model.FlowSignalHandler(q.activeSignal, val.Signal, q.logger).Signal } else if err := ctx.Err(); err != nil { return err } @@ -617,7 +617,7 @@ func QRepFlowWorkflow( if !ok { break } - q.activeSignal = model.FlowSignalHandler(q.activeSignal, val, q.logger) + q.activeSignal = model.FlowSignalHandler(q.activeSignal, val.Signal, q.logger).Signal } logger.Info("Continuing as new workflow", diff --git a/flow/workflows/xmin_flow.go b/flow/workflows/xmin_flow.go index 4cd6deece7..f24d02c68b 100644 --- a/flow/workflows/xmin_flow.go +++ b/flow/workflows/xmin_flow.go @@ -42,7 +42,7 @@ func XminFlowWorkflow( // only place we block on receive, so signal processing is immediate val, ok, _ := signalChan.ReceiveWithTimeout(ctx, 1*time.Minute) if ok { - q.activeSignal = model.FlowSignalHandler(q.activeSignal, val, logger) + q.activeSignal = model.FlowSignalHandler(q.activeSignal, val.Signal, logger).Signal } else if err := ctx.Err(); err != nil { return err } @@ -109,7 +109,7 @@ func XminFlowWorkflow( if !ok { break } - q.activeSignal = model.FlowSignalHandler(q.activeSignal, val, q.logger) + q.activeSignal = model.FlowSignalHandler(q.activeSignal, val.Signal, q.logger).Signal } logger.Info("Continuing as new workflow", diff --git a/nexus/flow-rs/src/grpc.rs b/nexus/flow-rs/src/grpc.rs index a07115c1ec..ceb9fbb043 100644 --- a/nexus/flow-rs/src/grpc.rs +++ b/nexus/flow-rs/src/grpc.rs @@ -114,6 +114,7 @@ impl FlowGrpcClient { source_peer: Some(workflow_details.source_peer), destination_peer: Some(workflow_details.destination_peer), flow_config_update, + custom_number_of_syncs:0, }; let response = self.client.flow_state_change(state_change_req).await?; let state_change_response = response.into_inner(); @@ -175,7 +176,7 @@ impl FlowGrpcClient { initial_snapshot_only: job.initial_snapshot_only, script: job.script.clone(), system: system as i32, - idle_timeout_seconds: job.sync_interval.unwrap_or_default(), + idle_timeout_seconds: job.sync_interval.unwrap_or_default() }; self.start_peer_flow(flow_conn_cfg).await diff --git a/protos/route.proto b/protos/route.proto index 61a3b89b75..c4602b499a 100644 --- a/protos/route.proto +++ b/protos/route.proto @@ -25,6 +25,18 @@ message CreateQRepFlowResponse { string workflow_id = 1; } +message CreateCustomFlowRequest { + string flow_job_name = 1; + int32 number_of_syncs = 2; +} + +message CreateCustomFlowResponse { + string flow_job_name = 1; + int32 number_of_syncs = 2; + string error_message = 3; + bool ok = 4; +} + message ShutdownRequest { string workflow_id = 1; string flow_job_name = 2; @@ -216,6 +228,7 @@ message FlowStateChangeRequest { peerdb_peers.Peer destination_peer = 4; // only can be sent in certain situations optional 
peerdb_flow.FlowConfigUpdate flow_config_update = 5; + int32 customNumberOfSyncs = 6; } message FlowStateChangeResponse { @@ -237,7 +250,7 @@ service FlowService { body: "*" }; } - rpc ValidateCDCMirror(CreateCDCFlowRequest) returns (ValidateCDCMirrorResponse) { + rpc ValidateCDCMirror(CreateCDCFlowRequest) returns (ValidateCDCMirrorResponse) { option (google.api.http) = { post: "/v1/mirrors/cdc/validate", body: "*" @@ -255,6 +268,7 @@ service FlowService { body: "*" }; } + rpc CreateCDCFlow(CreateCDCFlowRequest) returns (CreateCDCFlowResponse) { option (google.api.http) = { post: "/v1/flows/cdc/create", @@ -267,6 +281,12 @@ service FlowService { body: "*" }; } + rpc CustomSyncFlow(CreateCustomFlowRequest) returns (CreateCustomFlowResponse) { + option (google.api.http) = { + post: "/v1/flows/cdc/create/custom", + body: "*" + }; + } rpc GetSchemas(PostgresPeerActivityInfoRequest) returns (PeerSchemasResponse) { option (google.api.http) = { get: "/v1/peers/schemas" }; @@ -294,6 +314,7 @@ service FlowService { rpc GetStatInfo(PostgresPeerActivityInfoRequest) returns (PeerStatResponse) { option (google.api.http) = { get: "/v1/peers/stats/{peer_name}" }; } + rpc ShutdownFlow(ShutdownRequest) returns (ShutdownResponse) { option (google.api.http) = { post: "/v1/mirrors/drop", body: "*" }; } diff --git a/ui/app/mirrors/[mirrorId]/edit/page.tsx b/ui/app/mirrors/[mirrorId]/edit/page.tsx index 9c738fb712..027e6dafa1 100644 --- a/ui/app/mirrors/[mirrorId]/edit/page.tsx +++ b/ui/app/mirrors/[mirrorId]/edit/page.tsx @@ -92,6 +92,7 @@ const EditMirror = ({ params: { mirrorId } }: EditMirrorProps) => { flowConfigUpdate: { cdcFlowConfigUpdate: { ...config, additionalTables }, }, + customNumberOfSyncs:0 }; const res = await fetch(`/api/mirrors/state_change`, { method: 'POST', diff --git a/ui/app/mirrors/[mirrorId]/handlers.ts b/ui/app/mirrors/[mirrorId]/handlers.ts index bd6e0d3e2f..b486ea81f7 100644 --- a/ui/app/mirrors/[mirrorId]/handlers.ts +++ b/ui/app/mirrors/[mirrorId]/handlers.ts @@ -28,6 +28,7 @@ export const changeFlowState = async ( sourcePeer: mirrorConfig.source, destinationPeer: mirrorConfig.destination, requestedFlowState: flowState, + customNumberOfSyncs:0 }; await fetch(`/api/mirrors/state_change`, { method: 'POST', From 0f081ad9081e0d3c4beadd93155611116b82585b Mon Sep 17 00:00:00 2001 From: Amogh-Bharadwaj Date: Mon, 13 May 2024 12:37:32 +0530 Subject: [PATCH 02/31] check limit after normalise --- flow/workflows/cdc_flow.go | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/flow/workflows/cdc_flow.go b/flow/workflows/cdc_flow.go index 4d5ce5110e..9fa071fd5e 100644 --- a/flow/workflows/cdc_flow.go +++ b/flow/workflows/cdc_flow.go @@ -255,14 +255,11 @@ func CDCFlowWorkflow( selector.AddReceive(ctx.Done(), func(_ workflow.ReceiveChannel, _ bool) {}) flowSignalChan.AddToSelector(selector, func(val model.CDCFlowSignalProperties, _ bool) { cdcFlowData := model.FlowSignalHandler(state.ActiveSignal, val.Signal, logger) - slog.Info("value of signal", slog.Any("signal", cdcFlowData.Signal)) - slog.Info("cdc signal val", slog.Any("val", val)) state.ActiveSignal = cdcFlowData.Signal syncCountLimit = val.CustomNumberOfSyncs if syncCountLimit <= 0 { syncCountLimit = MaxSyncsPerCdcFlow } - slog.Info("sync limit reception inside pause", slog.Int("limit", syncCountLimit)) }) addCdcPropertiesSignalListener(ctx, logger, selector, state) @@ -487,12 +484,6 @@ func CDCFlowWorkflow( } logger.Info("normalize finished, finishing") - if syncCount == int(syncCountLimit) { - 
logger.Info("sync count limit reached, pausing", - slog.Int("limit", syncCountLimit), - slog.Int("count", syncCount)) - state.ActiveSignal = model.PauseSignal - } normFlowFuture = nil restart = true finished = true @@ -523,6 +514,12 @@ func CDCFlowWorkflow( normDoneChan := model.NormalizeDoneSignal.GetSignalChannel(ctx) normDoneChan.Drain() normDoneChan.AddToSelector(mainLoopSelector, func(x struct{}, _ bool) { + if syncCount == int(syncCountLimit) { + logger.Info("sync count limit reached, pausing", + slog.Int("limit", syncCountLimit), + slog.Int("count", syncCount)) + state.ActiveSignal = model.PauseSignal + } if syncFlowFuture != nil { _ = model.NormalizeDoneSignal.SignalChildWorkflow(ctx, syncFlowFuture, x).Get(ctx, nil) } From 9711c1d01e34cebc87c782f31020ece95447a8f9 Mon Sep 17 00:00:00 2001 From: Amogh-Bharadwaj Date: Mon, 13 May 2024 12:40:13 +0530 Subject: [PATCH 03/31] remove log --- flow/cmd/handler.go | 1 - 1 file changed, 1 deletion(-) diff --git a/flow/cmd/handler.go b/flow/cmd/handler.go index b349000e08..f479a66068 100644 --- a/flow/cmd/handler.go +++ b/flow/cmd/handler.go @@ -436,7 +436,6 @@ func (h *FlowRequestHandler) FlowStateChange( ) } else if req.RequestedFlowState == protos.FlowStatus_STATUS_RUNNING && currState == protos.FlowStatus_STATUS_PAUSED { - slog.Info("Resume handler", slog.Int("customNumberOfSyncs", int(req.CustomNumberOfSyncs))) err = model.FlowSignal.SignalClientWorkflow( ctx, h.temporalClient, From 8b1cfd966b7d391fd6280b7b833574be3964ab82 Mon Sep 17 00:00:00 2001 From: Amogh-Bharadwaj Date: Mon, 13 May 2024 13:03:54 +0530 Subject: [PATCH 04/31] lint for ui and go --- flow/model/signals.go | 1 + flow/workflows/cdc_flow.go | 2 +- ui/app/mirrors/[mirrorId]/edit/page.tsx | 2 +- ui/app/mirrors/[mirrorId]/handlers.ts | 2 +- 4 files changed, 4 insertions(+), 3 deletions(-) diff --git a/flow/model/signals.go b/flow/model/signals.go index e74285f01a..aa59c4db7e 100644 --- a/flow/model/signals.go +++ b/flow/model/signals.go @@ -100,6 +100,7 @@ func (self TypedReceiveChannel[T]) AddToSelector(selector workflow.Selector, f f } type CDCFlowSignal int64 + type CDCFlowSignalProperties struct { Signal CDCFlowSignal CustomNumberOfSyncs int diff --git a/flow/workflows/cdc_flow.go b/flow/workflows/cdc_flow.go index 9fa071fd5e..e67aa90730 100644 --- a/flow/workflows/cdc_flow.go +++ b/flow/workflows/cdc_flow.go @@ -514,7 +514,7 @@ func CDCFlowWorkflow( normDoneChan := model.NormalizeDoneSignal.GetSignalChannel(ctx) normDoneChan.Drain() normDoneChan.AddToSelector(mainLoopSelector, func(x struct{}, _ bool) { - if syncCount == int(syncCountLimit) { + if syncCount == syncCountLimit { logger.Info("sync count limit reached, pausing", slog.Int("limit", syncCountLimit), slog.Int("count", syncCount)) diff --git a/ui/app/mirrors/[mirrorId]/edit/page.tsx b/ui/app/mirrors/[mirrorId]/edit/page.tsx index 027e6dafa1..c6aae6c010 100644 --- a/ui/app/mirrors/[mirrorId]/edit/page.tsx +++ b/ui/app/mirrors/[mirrorId]/edit/page.tsx @@ -92,7 +92,7 @@ const EditMirror = ({ params: { mirrorId } }: EditMirrorProps) => { flowConfigUpdate: { cdcFlowConfigUpdate: { ...config, additionalTables }, }, - customNumberOfSyncs:0 + customNumberOfSyncs: 0, }; const res = await fetch(`/api/mirrors/state_change`, { method: 'POST', diff --git a/ui/app/mirrors/[mirrorId]/handlers.ts b/ui/app/mirrors/[mirrorId]/handlers.ts index b486ea81f7..4b271278f8 100644 --- a/ui/app/mirrors/[mirrorId]/handlers.ts +++ b/ui/app/mirrors/[mirrorId]/handlers.ts @@ -28,7 +28,7 @@ export const changeFlowState = async ( sourcePeer: 
mirrorConfig.source, destinationPeer: mirrorConfig.destination, requestedFlowState: flowState, - customNumberOfSyncs:0 + customNumberOfSyncs: 0, }; await fetch(`/api/mirrors/state_change`, { method: 'POST', From 097d418cc9435818e8ea5ae8ad3d5f72940a155c Mon Sep 17 00:00:00 2001 From: Amogh-Bharadwaj Date: Mon, 13 May 2024 20:29:20 +0530 Subject: [PATCH 05/31] use flow update config instead --- flow/cmd/custom_sync.go | 13 +++++++++---- flow/cmd/handler.go | 9 ++------- flow/e2e/postgres/peer_flow_pg_test.go | 8 ++------ flow/e2e/postgres/qrep_flow_pg_test.go | 8 ++------ flow/model/signals.go | 21 +++++---------------- flow/workflows/cdc_flow.go | 24 +++++++++++------------- flow/workflows/qrep_flow.go | 10 +++++----- flow/workflows/xmin_flow.go | 4 ++-- protos/flow.proto | 2 ++ protos/route.proto | 1 - 10 files changed, 40 insertions(+), 60 deletions(-) diff --git a/flow/cmd/custom_sync.go b/flow/cmd/custom_sync.go index 25c16fb910..46cb2beb7c 100644 --- a/flow/cmd/custom_sync.go +++ b/flow/cmd/custom_sync.go @@ -78,10 +78,15 @@ func (h *FlowRequestHandler) CustomSyncFlow( // Resume mirror with custom sync number _, err = h.FlowStateChange(ctx, &protos.FlowStateChangeRequest{ - FlowJobName: req.FlowJobName, - RequestedFlowState: protos.FlowStatus_STATUS_RUNNING, - FlowConfigUpdate: nil, - CustomNumberOfSyncs: req.NumberOfSyncs, + FlowJobName: req.FlowJobName, + RequestedFlowState: protos.FlowStatus_STATUS_RUNNING, + FlowConfigUpdate: &protos.FlowConfigUpdate{ + Update: &protos.FlowConfigUpdate_CdcFlowConfigUpdate{ + CdcFlowConfigUpdate: &protos.CDCFlowConfigUpdate{ + NumberOfSyncs: req.NumberOfSyncs, + }, + }, + }, }) if err != nil { return &protos.CreateCustomFlowResponse{ diff --git a/flow/cmd/handler.go b/flow/cmd/handler.go index f479a66068..f338b2185c 100644 --- a/flow/cmd/handler.go +++ b/flow/cmd/handler.go @@ -430,9 +430,7 @@ func (h *FlowRequestHandler) FlowStateChange( h.temporalClient, workflowID, "", - model.CDCFlowSignalProperties{ - Signal: model.PauseSignal, - }, + model.PauseSignal, ) } else if req.RequestedFlowState == protos.FlowStatus_STATUS_RUNNING && currState == protos.FlowStatus_STATUS_PAUSED { @@ -441,10 +439,7 @@ func (h *FlowRequestHandler) FlowStateChange( h.temporalClient, workflowID, "", - model.CDCFlowSignalProperties{ - Signal: model.NoopSignal, - CustomNumberOfSyncs: int(req.CustomNumberOfSyncs), - }, + model.NoopSignal, ) } else if req.RequestedFlowState == protos.FlowStatus_STATUS_TERMINATED && (currState != protos.FlowStatus_STATUS_TERMINATED) { diff --git a/flow/e2e/postgres/peer_flow_pg_test.go b/flow/e2e/postgres/peer_flow_pg_test.go index 74dd118255..68d6d950ee 100644 --- a/flow/e2e/postgres/peer_flow_pg_test.go +++ b/flow/e2e/postgres/peer_flow_pg_test.go @@ -991,9 +991,7 @@ func (s PeerFlowE2ETestSuitePG) Test_Dynamic_Mirror_Config_Via_Signals() { if !s.t.Failed() { addRows(1) - e2e.SignalWorkflow(env, model.FlowSignal, model.CDCFlowSignalProperties{ - Signal: model.PauseSignal, - }) + e2e.SignalWorkflow(env, model.FlowSignal, model.PauseSignal) addRows(1) e2e.EnvWaitFor(s.t, env, 1*time.Minute, "paused workflow", func() bool { // keep adding 1 more row - finishing another sync @@ -1017,9 +1015,7 @@ func (s PeerFlowE2ETestSuitePG) Test_Dynamic_Mirror_Config_Via_Signals() { // add rows to both tables before resuming - should handle addRows(18) - e2e.SignalWorkflow(env, model.FlowSignal, model.CDCFlowSignalProperties{ - Signal: model.NoopSignal, - }) + e2e.SignalWorkflow(env, model.FlowSignal, model.NoopSignal) e2e.EnvWaitFor(s.t, env, 1*time.Minute, 
"resumed workflow", func() bool { return getFlowStatus() == protos.FlowStatus_STATUS_RUNNING diff --git a/flow/e2e/postgres/qrep_flow_pg_test.go b/flow/e2e/postgres/qrep_flow_pg_test.go index a36b93be2a..eb67b43592 100644 --- a/flow/e2e/postgres/qrep_flow_pg_test.go +++ b/flow/e2e/postgres/qrep_flow_pg_test.go @@ -416,9 +416,7 @@ func (s PeerFlowE2ETestSuitePG) Test_Pause() { tc := e2e.NewTemporalClient(s.t) env := e2e.RunQRepFlowWorkflow(tc, config) - e2e.SignalWorkflow(env, model.FlowSignal, model.CDCFlowSignalProperties{ - Signal: model.PauseSignal, - }) + e2e.SignalWorkflow(env, model.FlowSignal, model.PauseSignal) e2e.EnvWaitFor(s.t, env, 3*time.Minute, "pausing", func() bool { response, err := env.Query(shared.QRepFlowStateQuery) @@ -433,9 +431,7 @@ func (s PeerFlowE2ETestSuitePG) Test_Pause() { } return state.CurrentFlowStatus == protos.FlowStatus_STATUS_PAUSED }) - e2e.SignalWorkflow(env, model.FlowSignal, model.CDCFlowSignalProperties{ - Signal: model.NoopSignal, - }) + e2e.SignalWorkflow(env, model.FlowSignal, model.NoopSignal) e2e.EnvWaitFor(s.t, env, time.Minute, "unpausing", func() bool { response, err := env.Query(shared.QRepFlowStateQuery) if err != nil { diff --git a/flow/model/signals.go b/flow/model/signals.go index aa59c4db7e..53fcf7e1e5 100644 --- a/flow/model/signals.go +++ b/flow/model/signals.go @@ -101,11 +101,6 @@ func (self TypedReceiveChannel[T]) AddToSelector(selector workflow.Selector, f f type CDCFlowSignal int64 -type CDCFlowSignalProperties struct { - Signal CDCFlowSignal - CustomNumberOfSyncs int -} - const ( NoopSignal CDCFlowSignal = iota _ @@ -114,31 +109,25 @@ const ( func FlowSignalHandler(activeSignal CDCFlowSignal, v CDCFlowSignal, logger log.Logger, -) CDCFlowSignalProperties { +) CDCFlowSignal { switch v { case PauseSignal: logger.Info("received pause signal") if activeSignal == NoopSignal { logger.Info("workflow was running, pausing it") - return CDCFlowSignalProperties{ - Signal: v, - } + return v } case NoopSignal: logger.Info("received resume signal") if activeSignal == PauseSignal { logger.Info("workflow was paused, resuming it") - return CDCFlowSignalProperties{ - Signal: v, - } + return v } } - return CDCFlowSignalProperties{ - Signal: activeSignal, - } + return activeSignal } -var FlowSignal = TypedSignal[CDCFlowSignalProperties]{ +var FlowSignal = TypedSignal[CDCFlowSignal]{ Name: "peer-flow-signal", } diff --git a/flow/workflows/cdc_flow.go b/flow/workflows/cdc_flow.go index e67aa90730..766ba055c7 100644 --- a/flow/workflows/cdc_flow.go +++ b/flow/workflows/cdc_flow.go @@ -47,6 +47,7 @@ func NewCDCFlowWorkflowState(cfg *protos.FlowConnectionConfigs) *CDCFlowWorkflow BatchSize: cfg.MaxBatchSize, IdleTimeoutSeconds: cfg.IdleTimeoutSeconds, TableMappings: tableMappings, + NumberOfSyncs: 0, }, } } @@ -172,13 +173,16 @@ func addCdcPropertiesSignalListener( if cdcConfigUpdate.IdleTimeout > 0 { state.SyncFlowOptions.IdleTimeoutSeconds = cdcConfigUpdate.IdleTimeout } + if cdcConfigUpdate.NumberOfSyncs > 0 { + state.SyncFlowOptions.NumberOfSyncs = cdcConfigUpdate.NumberOfSyncs + } // do this irrespective of additional tables being present, for auto unpausing state.FlowConfigUpdate = cdcConfigUpdate - logger.Info("CDC Signal received. 
Parameters on signal reception:", slog.Int("BatchSize", int(state.SyncFlowOptions.BatchSize)), slog.Int("IdleTimeout", int(state.SyncFlowOptions.IdleTimeoutSeconds)), - slog.Any("AdditionalTables", cdcConfigUpdate.AdditionalTables)) + slog.Any("AdditionalTables", cdcConfigUpdate.AdditionalTables), + slog.Int("NumberOfSyncs", int(state.SyncFlowOptions.NumberOfSyncs))) }) } @@ -253,16 +257,11 @@ func CDCFlowWorkflow( if state.ActiveSignal == model.PauseSignal { selector := workflow.NewNamedSelector(ctx, "PauseLoop") selector.AddReceive(ctx.Done(), func(_ workflow.ReceiveChannel, _ bool) {}) - flowSignalChan.AddToSelector(selector, func(val model.CDCFlowSignalProperties, _ bool) { - cdcFlowData := model.FlowSignalHandler(state.ActiveSignal, val.Signal, logger) - state.ActiveSignal = cdcFlowData.Signal - syncCountLimit = val.CustomNumberOfSyncs - if syncCountLimit <= 0 { - syncCountLimit = MaxSyncsPerCdcFlow - } + flowSignalChan.AddToSelector(selector, func(val model.CDCFlowSignal, _ bool) { + state.ActiveSignal = model.FlowSignalHandler(state.ActiveSignal, val, logger) }) addCdcPropertiesSignalListener(ctx, logger, selector, state) - + syncCountLimit = int(state.SyncFlowOptions.NumberOfSyncs) startTime := workflow.Now(ctx) state.CurrentFlowStatus = protos.FlowStatus_STATUS_PAUSED @@ -489,9 +488,8 @@ func CDCFlowWorkflow( finished = true }) - flowSignalChan.AddToSelector(mainLoopSelector, func(val model.CDCFlowSignalProperties, _ bool) { - cdcFlowData := model.FlowSignalHandler(state.ActiveSignal, val.Signal, logger) - state.ActiveSignal = cdcFlowData.Signal + flowSignalChan.AddToSelector(mainLoopSelector, func(val model.CDCFlowSignal, _ bool) { + state.ActiveSignal = model.FlowSignalHandler(state.ActiveSignal, val, logger) }) syncResultChan := model.SyncResultSignal.GetSignalChannel(ctx) diff --git a/flow/workflows/qrep_flow.go b/flow/workflows/qrep_flow.go index 5202d7bd04..8be2619866 100644 --- a/flow/workflows/qrep_flow.go +++ b/flow/workflows/qrep_flow.go @@ -322,7 +322,7 @@ func (q *QRepFlowExecution) consolidatePartitions(ctx workflow.Context) error { func (q *QRepFlowExecution) waitForNewRows( ctx workflow.Context, - signalChan model.TypedReceiveChannel[model.CDCFlowSignalProperties], + signalChan model.TypedReceiveChannel[model.CDCFlowSignal], lastPartition *protos.QRepPartition, ) error { ctx = workflow.WithChildOptions(ctx, workflow.ChildWorkflowOptions{ @@ -336,8 +336,8 @@ func (q *QRepFlowExecution) waitForNewRows( var newRows bool var waitErr error waitSelector := workflow.NewNamedSelector(ctx, "WaitForRows") - signalChan.AddToSelector(waitSelector, func(val model.CDCFlowSignalProperties, _ bool) { - q.activeSignal = model.FlowSignalHandler(q.activeSignal, val.Signal, q.logger).Signal + signalChan.AddToSelector(waitSelector, func(val model.CDCFlowSignal, _ bool) { + q.activeSignal = model.FlowSignalHandler(q.activeSignal, val, q.logger) }) waitSelector.AddFuture(future, func(f workflow.Future) { newRows = true @@ -541,7 +541,7 @@ func QRepFlowWorkflow( // only place we block on receive, so signal processing is immediate val, ok, _ := signalChan.ReceiveWithTimeout(ctx, 1*time.Minute) if ok { - q.activeSignal = model.FlowSignalHandler(q.activeSignal, val.Signal, q.logger).Signal + q.activeSignal = model.FlowSignalHandler(q.activeSignal, val, q.logger) } else if err := ctx.Err(); err != nil { return err } @@ -617,7 +617,7 @@ func QRepFlowWorkflow( if !ok { break } - q.activeSignal = model.FlowSignalHandler(q.activeSignal, val.Signal, q.logger).Signal + q.activeSignal = 
model.FlowSignalHandler(q.activeSignal, val, q.logger) } logger.Info("Continuing as new workflow", diff --git a/flow/workflows/xmin_flow.go b/flow/workflows/xmin_flow.go index f24d02c68b..4cd6deece7 100644 --- a/flow/workflows/xmin_flow.go +++ b/flow/workflows/xmin_flow.go @@ -42,7 +42,7 @@ func XminFlowWorkflow( // only place we block on receive, so signal processing is immediate val, ok, _ := signalChan.ReceiveWithTimeout(ctx, 1*time.Minute) if ok { - q.activeSignal = model.FlowSignalHandler(q.activeSignal, val.Signal, logger).Signal + q.activeSignal = model.FlowSignalHandler(q.activeSignal, val, logger) } else if err := ctx.Err(); err != nil { return err } @@ -109,7 +109,7 @@ func XminFlowWorkflow( if !ok { break } - q.activeSignal = model.FlowSignalHandler(q.activeSignal, val.Signal, q.logger).Signal + q.activeSignal = model.FlowSignalHandler(q.activeSignal, val, q.logger) } logger.Info("Continuing as new workflow", diff --git a/protos/flow.proto b/protos/flow.proto index 4d4343f71a..d092563ddc 100644 --- a/protos/flow.proto +++ b/protos/flow.proto @@ -111,6 +111,7 @@ message SyncFlowOptions { map src_table_id_name_mapping = 4; map table_name_schema_mapping = 5; repeated TableMapping table_mappings = 6; + int32 number_of_syncs = 7; } message StartNormalizeInput { @@ -371,6 +372,7 @@ message CDCFlowConfigUpdate { repeated TableMapping additional_tables = 1; uint32 batch_size = 2; uint64 idle_timeout = 3; + int32 number_of_syncs = 4; } message QRepFlowConfigUpdate { diff --git a/protos/route.proto b/protos/route.proto index c4602b499a..c7b4ceb0d4 100644 --- a/protos/route.proto +++ b/protos/route.proto @@ -228,7 +228,6 @@ message FlowStateChangeRequest { peerdb_peers.Peer destination_peer = 4; // only can be sent in certain situations optional peerdb_flow.FlowConfigUpdate flow_config_update = 5; - int32 customNumberOfSyncs = 6; } message FlowStateChangeResponse { From 8ba424f15538ced0857ac2117d1d4326b18e918e Mon Sep 17 00:00:00 2001 From: Amogh-Bharadwaj Date: Tue, 14 May 2024 16:47:08 +0530 Subject: [PATCH 06/31] fix the wiring --- flow/workflows/cdc_flow.go | 2 +- nexus/flow-rs/src/grpc.rs | 3 +-- ui/app/mirrors/[mirrorId]/edit/page.tsx | 3 ++- ui/app/mirrors/[mirrorId]/handlers.ts | 1 - 4 files changed, 4 insertions(+), 5 deletions(-) diff --git a/flow/workflows/cdc_flow.go b/flow/workflows/cdc_flow.go index 766ba055c7..c44587bb2a 100644 --- a/flow/workflows/cdc_flow.go +++ b/flow/workflows/cdc_flow.go @@ -261,7 +261,6 @@ func CDCFlowWorkflow( state.ActiveSignal = model.FlowSignalHandler(state.ActiveSignal, val, logger) }) addCdcPropertiesSignalListener(ctx, logger, selector, state) - syncCountLimit = int(state.SyncFlowOptions.NumberOfSyncs) startTime := workflow.Now(ctx) state.CurrentFlowStatus = protos.FlowStatus_STATUS_PAUSED @@ -287,6 +286,7 @@ func CDCFlowWorkflow( if err != nil { return state, err } + syncCountLimit = int(state.SyncFlowOptions.NumberOfSyncs) logger.Info("wiping flow state after state update processing") // finished processing, wipe it state.FlowConfigUpdate = nil diff --git a/nexus/flow-rs/src/grpc.rs b/nexus/flow-rs/src/grpc.rs index ceb9fbb043..a07115c1ec 100644 --- a/nexus/flow-rs/src/grpc.rs +++ b/nexus/flow-rs/src/grpc.rs @@ -114,7 +114,6 @@ impl FlowGrpcClient { source_peer: Some(workflow_details.source_peer), destination_peer: Some(workflow_details.destination_peer), flow_config_update, - custom_number_of_syncs:0, }; let response = self.client.flow_state_change(state_change_req).await?; let state_change_response = response.into_inner(); @@ -176,7 +175,7 
@@ impl FlowGrpcClient { initial_snapshot_only: job.initial_snapshot_only, script: job.script.clone(), system: system as i32, - idle_timeout_seconds: job.sync_interval.unwrap_or_default() + idle_timeout_seconds: job.sync_interval.unwrap_or_default(), }; self.start_peer_flow(flow_conn_cfg).await diff --git a/ui/app/mirrors/[mirrorId]/edit/page.tsx b/ui/app/mirrors/[mirrorId]/edit/page.tsx index c6aae6c010..3a2f1df7e1 100644 --- a/ui/app/mirrors/[mirrorId]/edit/page.tsx +++ b/ui/app/mirrors/[mirrorId]/edit/page.tsx @@ -38,6 +38,7 @@ const EditMirror = ({ params: { mirrorId } }: EditMirrorProps) => { batchSize: defaultBatchSize, idleTimeout: defaultIdleTimeout, additionalTables: [], + numberOfSyncs: 0, }); const { push } = useRouter(); @@ -53,6 +54,7 @@ const EditMirror = ({ params: { mirrorId } }: EditMirrorProps) => { (res as MirrorStatusResponse).cdcStatus?.config?.idleTimeoutSeconds || defaultIdleTimeout, additionalTables: [], + numberOfSyncs: 0, }); }); }, [mirrorId, defaultBatchSize, defaultIdleTimeout]); @@ -92,7 +94,6 @@ const EditMirror = ({ params: { mirrorId } }: EditMirrorProps) => { flowConfigUpdate: { cdcFlowConfigUpdate: { ...config, additionalTables }, }, - customNumberOfSyncs: 0, }; const res = await fetch(`/api/mirrors/state_change`, { method: 'POST', diff --git a/ui/app/mirrors/[mirrorId]/handlers.ts b/ui/app/mirrors/[mirrorId]/handlers.ts index 4b271278f8..bd6e0d3e2f 100644 --- a/ui/app/mirrors/[mirrorId]/handlers.ts +++ b/ui/app/mirrors/[mirrorId]/handlers.ts @@ -28,7 +28,6 @@ export const changeFlowState = async ( sourcePeer: mirrorConfig.source, destinationPeer: mirrorConfig.destination, requestedFlowState: flowState, - customNumberOfSyncs: 0, }; await fetch(`/api/mirrors/state_change`, { method: 'POST', From 65dbf822f490eb3b3e671a641b438252b4759119 Mon Sep 17 00:00:00 2001 From: Amogh-Bharadwaj Date: Mon, 27 May 2024 18:59:54 +0530 Subject: [PATCH 07/31] refactoring and status endpoint --- flow/cmd/custom_sync.go | 61 +++++++++++++++++++++++++++++++++++++++ flow/cmd/mirror_status.go | 48 ++++++++++++++++++++++++++++++ protos/route.proto | 12 +++++++- 3 files changed, 120 insertions(+), 1 deletion(-) diff --git a/flow/cmd/custom_sync.go b/flow/cmd/custom_sync.go index 46cb2beb7c..07e3f4ee66 100644 --- a/flow/cmd/custom_sync.go +++ b/flow/cmd/custom_sync.go @@ -3,16 +3,62 @@ package cmd import ( "context" "fmt" + "log/slog" "github.com/PeerDB-io/peer-flow/generated/protos" + "github.com/PeerDB-io/peer-flow/peerdbenv" peerflow "github.com/PeerDB-io/peer-flow/workflows" + + "google.golang.org/grpc/metadata" + //sha256 + "crypto/sha256" ) const peerdbPauseGuideDocLink = "https://docs.peerdb.io/features/pause-mirror" +func AuthenticateSyncRequest(ctx context.Context) error { + var values []string + var token string + + md, ok := metadata.FromIncomingContext(ctx) + if ok { + values = md.Get("authorization") + } + + if len(values) > 0 { + token = values[0] + } + + deploymentUid := peerdbenv.PeerDBDeploymentUID() + hash := sha256.New() + _, err := hash.Write([]byte(deploymentUid)) + if err != nil { + slog.Error("Server error: unable to verify authorization", slog.Any("error", err)) + return fmt.Errorf("Server error: unable to verify authorization. Please try again.") + } + + deploymentUidHashed := fmt.Sprintf("%x", hash.Sum(nil)) + if token != "Bearer "+deploymentUidHashed { + slog.Error("Unauthorized: invalid authorization token", slog.String("token", token)) + return fmt.Errorf("Unauthorized: invalid authorization token. 
Please check the token and try again.") + } + + return nil +} + func (h *FlowRequestHandler) CustomSyncFlow( ctx context.Context, req *protos.CreateCustomFlowRequest, ) (*protos.CreateCustomFlowResponse, error) { + err := AuthenticateSyncRequest(ctx) + if err != nil { + return &protos.CreateCustomFlowResponse{ + FlowJobName: req.FlowJobName, + NumberOfSyncs: 0, + ErrorMessage: err.Error(), + Ok: false, + }, nil + } + // ---- REQUEST VALIDATION ---- if req.FlowJobName == "" { return &protos.CreateCustomFlowResponse{ @@ -24,6 +70,8 @@ func (h *FlowRequestHandler) CustomSyncFlow( } if req.NumberOfSyncs <= 0 || req.NumberOfSyncs > peerflow.MaxSyncsPerCdcFlow { + slog.Error("Invalid sync number request", + slog.Any("requested_number_of_syncs", req.NumberOfSyncs)) return &protos.CreateCustomFlowResponse{ FlowJobName: req.FlowJobName, NumberOfSyncs: 0, @@ -35,6 +83,7 @@ func (h *FlowRequestHandler) CustomSyncFlow( mirrorExists, err := h.CheckIfMirrorNameExists(ctx, req.FlowJobName) if err != nil { + slog.Error("Server error: unable to check if mirror exists", slog.Any("error", err)) return &protos.CreateCustomFlowResponse{ FlowJobName: req.FlowJobName, NumberOfSyncs: 0, @@ -43,6 +92,7 @@ func (h *FlowRequestHandler) CustomSyncFlow( }, nil } if !mirrorExists { + slog.Error("Mirror does not exist", slog.Any("mirror_name", req.FlowJobName)) return &protos.CreateCustomFlowResponse{ FlowJobName: req.FlowJobName, NumberOfSyncs: 0, @@ -55,6 +105,9 @@ func (h *FlowRequestHandler) CustomSyncFlow( FlowJobName: req.FlowJobName, }) if mirrorStatusResponse.ErrorMessage != "" { + slog.Error("Server error: unable to check the status of mirror", + slog.Any("mirror", req.FlowJobName), + slog.Any("error", mirrorStatusResponse.ErrorMessage)) return &protos.CreateCustomFlowResponse{ FlowJobName: req.FlowJobName, NumberOfSyncs: 0, @@ -65,6 +118,7 @@ func (h *FlowRequestHandler) CustomSyncFlow( } if mirrorStatusResponse.CurrentFlowState != protos.FlowStatus_STATUS_PAUSED { + slog.Error("Mirror is not paused", slog.Any("mirror", req.FlowJobName)) return &protos.CreateCustomFlowResponse{ FlowJobName: req.FlowJobName, NumberOfSyncs: 0, @@ -89,6 +143,9 @@ func (h *FlowRequestHandler) CustomSyncFlow( }, }) if err != nil { + slog.Error("Unable to kick off custom sync for mirror", + slog.Any("mirror", req.FlowJobName), + slog.Any("error", err)) return &protos.CreateCustomFlowResponse{ FlowJobName: req.FlowJobName, NumberOfSyncs: 0, @@ -98,6 +155,10 @@ func (h *FlowRequestHandler) CustomSyncFlow( }, nil } + slog.Info("Custom sync started for mirror", + slog.String("mirror", req.FlowJobName), + slog.Int("number_of_syncs", int(req.NumberOfSyncs))) + return &protos.CreateCustomFlowResponse{ FlowJobName: req.FlowJobName, NumberOfSyncs: req.NumberOfSyncs, diff --git a/flow/cmd/mirror_status.go b/flow/cmd/mirror_status.go index 9e72ce6466..b9b8722b60 100644 --- a/flow/cmd/mirror_status.go +++ b/flow/cmd/mirror_status.go @@ -407,3 +407,51 @@ func (h *FlowRequestHandler) getCDCWorkflowState(ctx context.Context, } return &state, nil } + +func (h *FlowRequestHandler) getRunningStatus( + ctx context.Context, + flowName string, +) (*protos.FlowStatus, error) { + workflowID, err := h.getWorkflowID(ctx, flowName) + if err != nil { + return nil, err + } + + currState, err := h.getWorkflowStatus(ctx, workflowID) + if err != nil { + return nil, err + } + + return &currState, nil +} + +func (h *FlowRequestHandler) CDCMirrorStatus(ctx context.Context, req *protos.MirrorStatusRequest) (*protos.CDCStatusResponse, error) { + err := 
AuthenticateSyncRequest(ctx) + if err != nil { + return &protos.CDCStatusResponse{ + FlowJobName: req.FlowJobName, + CurrentFlowState: protos.FlowStatus_STATUS_UNKNOWN, + ErrorMessage: "Unauthorized: invalid authorization token. Please check the token and try again.", + Ok: false, + }, nil + } + + cdcState, err := h.getRunningStatus(ctx, req.FlowJobName) + if err != nil { + slog.Error("Error fetching status of mirror", + slog.String(string(shared.FlowNameKey), req.FlowJobName), + slog.Any("error", err)) + return &protos.CDCStatusResponse{ + FlowJobName: req.FlowJobName, + CurrentFlowState: protos.FlowStatus_STATUS_UNKNOWN, + Ok: false, + ErrorMessage: fmt.Sprintf("Error fetching status of mirror %s: %v", req.FlowJobName, err)}, err + } + + return &protos.CDCStatusResponse{ + FlowJobName: req.FlowJobName, + CurrentFlowState: *cdcState, + Ok: true, + ErrorMessage: "", + }, nil +} diff --git a/protos/route.proto b/protos/route.proto index c7b4ceb0d4..29a5dc8b22 100644 --- a/protos/route.proto +++ b/protos/route.proto @@ -217,6 +217,13 @@ message MirrorStatusResponse { peerdb_flow.FlowStatus current_flow_state = 5; } +message CDCStatusResponse { + string flow_job_name = 1; + peerdb_flow.FlowStatus current_flow_state = 2; + string error_message = 3; + bool ok = 4; +} + message ValidateCDCMirrorResponse{ bool ok = 1; } @@ -282,7 +289,7 @@ service FlowService { } rpc CustomSyncFlow(CreateCustomFlowRequest) returns (CreateCustomFlowResponse) { option (google.api.http) = { - post: "/v1/flows/cdc/create/custom", + post: "/v1/flows/cdc/sync", body: "*" }; } @@ -323,6 +330,9 @@ service FlowService { rpc MirrorStatus(MirrorStatusRequest) returns (MirrorStatusResponse) { option (google.api.http) = { get: "/v1/mirrors/{flow_job_name}" }; } + rpc CDCMirrorStatus(MirrorStatusRequest) returns (CDCStatusResponse) { + option (google.api.http) = { get: "/v1/flows/cdc/{flow_job_name}" }; + } rpc GetVersion(PeerDBVersionRequest) returns (PeerDBVersionResponse) { option (google.api.http) = { get: "/v1/version" }; From 22b472b041b18f432ef2c9bad59c375e087bb02c Mon Sep 17 00:00:00 2001 From: Amogh-Bharadwaj Date: Mon, 27 May 2024 19:34:22 +0530 Subject: [PATCH 08/31] lint --- flow/cmd/custom_sync.go | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/flow/cmd/custom_sync.go b/flow/cmd/custom_sync.go index 07e3f4ee66..51f13c48f6 100644 --- a/flow/cmd/custom_sync.go +++ b/flow/cmd/custom_sync.go @@ -2,6 +2,8 @@ package cmd import ( "context" + "encoding/hex" + "errors" "fmt" "log/slog" @@ -9,9 +11,9 @@ import ( "github.com/PeerDB-io/peer-flow/peerdbenv" peerflow "github.com/PeerDB-io/peer-flow/workflows" - "google.golang.org/grpc/metadata" - //sha256 "crypto/sha256" + + "google.golang.org/grpc/metadata" ) const peerdbPauseGuideDocLink = "https://docs.peerdb.io/features/pause-mirror" @@ -34,13 +36,13 @@ func AuthenticateSyncRequest(ctx context.Context) error { _, err := hash.Write([]byte(deploymentUid)) if err != nil { slog.Error("Server error: unable to verify authorization", slog.Any("error", err)) - return fmt.Errorf("Server error: unable to verify authorization. Please try again.") + return errors.New("server error: unable to verify authorization. Please try again.") } - deploymentUidHashed := fmt.Sprintf("%x", hash.Sum(nil)) + deploymentUidHashed := hex.EncodeToString(hash.Sum(nil)) if token != "Bearer "+deploymentUidHashed { slog.Error("Unauthorized: invalid authorization token", slog.String("token", token)) - return fmt.Errorf("Unauthorized: invalid authorization token. 
Please check the token and try again.") + return errors.New("unauthorized: invalid authorization token. Please check the token and try again.") } return nil From e21dcde90ba8240b2b7c4af8c5cb04e2d735c943 Mon Sep 17 00:00:00 2001 From: Amogh-Bharadwaj Date: Mon, 27 May 2024 19:54:35 +0530 Subject: [PATCH 09/31] revert core.go --- flow/connectors/core.go | 3 --- 1 file changed, 3 deletions(-) diff --git a/flow/connectors/core.go b/flow/connectors/core.go index 613aaa3a39..0f25c4509c 100644 --- a/flow/connectors/core.go +++ b/flow/connectors/core.go @@ -248,9 +248,6 @@ type RenameTablesConnector interface { } func GetConnector(ctx context.Context, config *protos.Peer) (Connector, error) { - if config == nil { - return nil, errors.ErrUnsupported - } switch inner := config.Config.(type) { case *protos.Peer_PostgresConfig: return connpostgres.NewPostgresConnector(ctx, inner.PostgresConfig) From fda6904c604714f01a7376f5039b923c68236f18 Mon Sep 17 00:00:00 2001 From: Amogh-Bharadwaj Date: Mon, 27 May 2024 20:25:55 +0530 Subject: [PATCH 10/31] change authentication to use password --- docker-compose-dev.yml | 1 + docker-compose.yml | 1 + flow/cmd/custom_sync.go | 11 ++++++----- flow/peerdbenv/config.go | 5 +++++ 4 files changed, 13 insertions(+), 5 deletions(-) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 1868c755bf..166196c0f2 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -131,6 +131,7 @@ services: - 8113:8113 environment: <<: [*catalog-config, *flow-worker-env, *minio-config] + PEERDB_PASSWORD: peerdb depends_on: temporal-admin-tools: condition: service_healthy diff --git a/docker-compose.yml b/docker-compose.yml index 1b1617233d..154e4ba70d 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -117,6 +117,7 @@ services: - 8113:8113 environment: <<: [*catalog-config, *flow-worker-env, *minio-config] + PEERDB_PASSWORD: peerdb depends_on: temporal-admin-tools: condition: service_healthy diff --git a/flow/cmd/custom_sync.go b/flow/cmd/custom_sync.go index 51f13c48f6..0026229119 100644 --- a/flow/cmd/custom_sync.go +++ b/flow/cmd/custom_sync.go @@ -31,17 +31,18 @@ func AuthenticateSyncRequest(ctx context.Context) error { token = values[0] } - deploymentUid := peerdbenv.PeerDBDeploymentUID() + peerdbPassword := peerdbenv.PeerDBPassword() hash := sha256.New() - _, err := hash.Write([]byte(deploymentUid)) + _, err := hash.Write([]byte(peerdbPassword)) if err != nil { slog.Error("Server error: unable to verify authorization", slog.Any("error", err)) return errors.New("server error: unable to verify authorization. Please try again.") } - deploymentUidHashed := hex.EncodeToString(hash.Sum(nil)) - if token != "Bearer "+deploymentUidHashed { - slog.Error("Unauthorized: invalid authorization token", slog.String("token", token)) + passwordHashed := hex.EncodeToString(hash.Sum(nil)) + if token != "Bearer "+passwordHashed { + slog.Error("Unauthorized: invalid authorization token", slog.String("token", token), + slog.String("expected_token", "Bearer "+passwordHashed)) return errors.New("unauthorized: invalid authorization token. 
Please check the token and try again.") } diff --git a/flow/peerdbenv/config.go b/flow/peerdbenv/config.go index bcf1ac050e..6104e44b2a 100644 --- a/flow/peerdbenv/config.go +++ b/flow/peerdbenv/config.go @@ -21,6 +21,11 @@ func PeerDBDeploymentUID() string { return GetEnvString("PEERDB_DEPLOYMENT_UID", "") } +// PEERDB_PASSWORD +func PeerDBPassword() string { + return GetEnvString("PEERDB_PASSWORD", "") +} + func PeerFlowTaskQueueName(taskQueueID shared.TaskQueueID) string { deploymentUID := PeerDBDeploymentUID() if deploymentUID == "" { From 46bf128cacfa4662ab6169335360a7a2faf799f1 Mon Sep 17 00:00:00 2001 From: Amogh-Bharadwaj Date: Mon, 27 May 2024 23:27:45 +0530 Subject: [PATCH 11/31] suggested changes --- flow/cmd/custom_sync.go | 130 +++++++++++++++++++--------------------- protos/route.proto | 6 +- 2 files changed, 66 insertions(+), 70 deletions(-) diff --git a/flow/cmd/custom_sync.go b/flow/cmd/custom_sync.go index 0026229119..669c4986a8 100644 --- a/flow/cmd/custom_sync.go +++ b/flow/cmd/custom_sync.go @@ -6,6 +6,7 @@ import ( "errors" "fmt" "log/slog" + "sync" "github.com/PeerDB-io/peer-flow/generated/protos" "github.com/PeerDB-io/peer-flow/peerdbenv" @@ -18,6 +19,29 @@ import ( const peerdbPauseGuideDocLink = "https://docs.peerdb.io/features/pause-mirror" +// Memoization map and mutex for storing and accessing hashed passwords +var ( + passwordHashes = make(map[string]string) + hashMutex sync.Mutex +) + +func getHashedPassword() string { + hashMutex.Lock() + defer hashMutex.Unlock() + + password := peerdbenv.PeerDBPassword() + if hashed, exists := passwordHashes[password]; exists { + return hashed + } + + hash := sha256.New() + hash.Write([]byte(password)) + + hashedPassword := hex.EncodeToString(hash.Sum(nil)) + passwordHashes[password] = hashedPassword + return hashedPassword +} + func AuthenticateSyncRequest(ctx context.Context) error { var values []string var token string @@ -31,18 +55,9 @@ func AuthenticateSyncRequest(ctx context.Context) error { token = values[0] } - peerdbPassword := peerdbenv.PeerDBPassword() - hash := sha256.New() - _, err := hash.Write([]byte(peerdbPassword)) - if err != nil { - slog.Error("Server error: unable to verify authorization", slog.Any("error", err)) - return errors.New("server error: unable to verify authorization. Please try again.") - } - - passwordHashed := hex.EncodeToString(hash.Sum(nil)) + passwordHashed := getHashedPassword() if token != "Bearer "+passwordHashed { - slog.Error("Unauthorized: invalid authorization token", slog.String("token", token), - slog.String("expected_token", "Bearer "+passwordHashed)) + slog.Error("Unauthorized: invalid authorization token") return errors.New("unauthorized: invalid authorization token. 
Please check the token and try again.") } @@ -50,58 +65,44 @@ func AuthenticateSyncRequest(ctx context.Context) error { } func (h *FlowRequestHandler) CustomSyncFlow( - ctx context.Context, req *protos.CreateCustomFlowRequest, -) (*protos.CreateCustomFlowResponse, error) { + ctx context.Context, req *protos.CreateCustomSyncRequest, +) (*protos.CreateCustomSyncResponse, error) { + errResponse := &protos.CreateCustomSyncResponse{ + FlowJobName: req.FlowJobName, + NumberOfSyncs: 0, + ErrorMessage: "error while processing request", + Ok: false, + } err := AuthenticateSyncRequest(ctx) if err != nil { - return &protos.CreateCustomFlowResponse{ - FlowJobName: req.FlowJobName, - NumberOfSyncs: 0, - ErrorMessage: err.Error(), - Ok: false, - }, nil + errResponse.ErrorMessage = err.Error() + return errResponse, nil } // ---- REQUEST VALIDATION ---- if req.FlowJobName == "" { - return &protos.CreateCustomFlowResponse{ - FlowJobName: req.FlowJobName, - NumberOfSyncs: 0, - ErrorMessage: "Flow job name is not provided", - Ok: false, - }, nil + errResponse.ErrorMessage = "Mirror name cannot be empty." + return errResponse, nil } if req.NumberOfSyncs <= 0 || req.NumberOfSyncs > peerflow.MaxSyncsPerCdcFlow { slog.Error("Invalid sync number request", slog.Any("requested_number_of_syncs", req.NumberOfSyncs)) - return &protos.CreateCustomFlowResponse{ - FlowJobName: req.FlowJobName, - NumberOfSyncs: 0, - ErrorMessage: fmt.Sprintf("Sync number request must be between 1 and %d (inclusive). Requested number: %d", - peerflow.MaxSyncsPerCdcFlow, req.NumberOfSyncs), - Ok: false, - }, nil + errResponse.ErrorMessage = fmt.Sprintf("Sync number request must be between 1 and %d (inclusive). Requested number: %d", + peerflow.MaxSyncsPerCdcFlow, req.NumberOfSyncs) + return errResponse, nil } mirrorExists, err := h.CheckIfMirrorNameExists(ctx, req.FlowJobName) if err != nil { slog.Error("Server error: unable to check if mirror exists", slog.Any("error", err)) - return &protos.CreateCustomFlowResponse{ - FlowJobName: req.FlowJobName, - NumberOfSyncs: 0, - ErrorMessage: "Server error: unable to check if mirror " + req.FlowJobName + " exists.", - Ok: false, - }, nil + errResponse.ErrorMessage = "Server error: unable to check if mirror " + req.FlowJobName + " exists." + return errResponse, nil } if !mirrorExists { slog.Error("Mirror does not exist", slog.Any("mirror_name", req.FlowJobName)) - return &protos.CreateCustomFlowResponse{ - FlowJobName: req.FlowJobName, - NumberOfSyncs: 0, - ErrorMessage: req.FlowJobName + "does not exist. 
This may be because it was dropped.", - Ok: false, - }, nil + errResponse.ErrorMessage = fmt.Sprintf("Mirror %s does not exist", req.FlowJobName) + return errResponse, nil } mirrorStatusResponse, _ := h.MirrorStatus(ctx, &protos.MirrorStatusRequest{ @@ -111,25 +112,24 @@ func (h *FlowRequestHandler) CustomSyncFlow( slog.Error("Server error: unable to check the status of mirror", slog.Any("mirror", req.FlowJobName), slog.Any("error", mirrorStatusResponse.ErrorMessage)) - return &protos.CreateCustomFlowResponse{ - FlowJobName: req.FlowJobName, - NumberOfSyncs: 0, - ErrorMessage: fmt.Sprintf("Server error: unable to check the status of mirror %s: %s", - req.FlowJobName, mirrorStatusResponse.ErrorMessage), - Ok: false, - }, nil + errResponse.ErrorMessage = fmt.Sprintf("Server error: unable to check the status of mirror %s: %s", + req.FlowJobName, mirrorStatusResponse.ErrorMessage) + return errResponse, nil } if mirrorStatusResponse.CurrentFlowState != protos.FlowStatus_STATUS_PAUSED { slog.Error("Mirror is not paused", slog.Any("mirror", req.FlowJobName)) - return &protos.CreateCustomFlowResponse{ - FlowJobName: req.FlowJobName, - NumberOfSyncs: 0, - ErrorMessage: fmt.Sprintf(`Requested mirror %s is not paused. This is a requirement. - The mirror can be paused via PeerDB UI. Please follow %s`, - req.FlowJobName, peerdbPauseGuideDocLink), - Ok: false, - }, nil + errResponse.ErrorMessage = fmt.Sprintf(`Requested mirror %s is not paused. This is a requirement. + The mirror can be paused via PeerDB UI. Please follow %s`, + req.FlowJobName, peerdbPauseGuideDocLink) + return errResponse, nil + } + + // Parallel sync-normalise should not be enabled + parallelSyncNormaliseEnabled := peerdbenv.PeerDBEnableParallelSyncNormalize() + if parallelSyncNormaliseEnabled { + errResponse.ErrorMessage = "Parallel sync-normalise is enabled. Please contact PeerDB support to disable it to proceed." 
+ return errResponse, nil } // ---- REQUEST VALIDATED ---- @@ -149,20 +149,16 @@ func (h *FlowRequestHandler) CustomSyncFlow( slog.Error("Unable to kick off custom sync for mirror", slog.Any("mirror", req.FlowJobName), slog.Any("error", err)) - return &protos.CreateCustomFlowResponse{ - FlowJobName: req.FlowJobName, - NumberOfSyncs: 0, - ErrorMessage: fmt.Sprintf("Unable to kick off sync for mirror %s:%s", - req.FlowJobName, err.Error()), - Ok: false, - }, nil + errResponse.ErrorMessage = fmt.Sprintf("Unable to kick off sync for mirror %s:%s", + req.FlowJobName, err.Error()) + return errResponse, nil } slog.Info("Custom sync started for mirror", slog.String("mirror", req.FlowJobName), slog.Int("number_of_syncs", int(req.NumberOfSyncs))) - return &protos.CreateCustomFlowResponse{ + return &protos.CreateCustomSyncResponse{ FlowJobName: req.FlowJobName, NumberOfSyncs: req.NumberOfSyncs, ErrorMessage: "", diff --git a/protos/route.proto b/protos/route.proto index 29a5dc8b22..d9cf735382 100644 --- a/protos/route.proto +++ b/protos/route.proto @@ -25,12 +25,12 @@ message CreateQRepFlowResponse { string workflow_id = 1; } -message CreateCustomFlowRequest { +message CreateCustomSyncRequest { string flow_job_name = 1; int32 number_of_syncs = 2; } -message CreateCustomFlowResponse { +message CreateCustomSyncResponse { string flow_job_name = 1; int32 number_of_syncs = 2; string error_message = 3; @@ -287,7 +287,7 @@ service FlowService { body: "*" }; } - rpc CustomSyncFlow(CreateCustomFlowRequest) returns (CreateCustomFlowResponse) { + rpc CustomSyncFlow(CreateCustomSyncRequest) returns (CreateCustomSyncResponse) { option (google.api.http) = { post: "/v1/flows/cdc/sync", body: "*" From 7bc715110d9863c8ebd4c495b5e00f3fb2b79bd8 Mon Sep 17 00:00:00 2001 From: Amogh-Bharadwaj Date: Wed, 29 May 2024 01:11:19 +0530 Subject: [PATCH 12/31] use existing status endpoint, adapt UI wiring --- flow/cmd/mirror_status.go | 141 +++++++++++++---------------- protos/route.proto | 12 +-- ui/app/api/mirrors/state/route.ts | 12 ++- ui/app/mirrors/[mirrorId]/page.tsx | 9 +- ui/app/utils/passwordFromEnv.ts | 10 ++ 5 files changed, 94 insertions(+), 90 deletions(-) create mode 100644 ui/app/utils/passwordFromEnv.ts diff --git a/flow/cmd/mirror_status.go b/flow/cmd/mirror_status.go index b9b8722b60..0cf5ab243f 100644 --- a/flow/cmd/mirror_status.go +++ b/flow/cmd/mirror_status.go @@ -19,60 +19,97 @@ func (h *FlowRequestHandler) MirrorStatus( ctx context.Context, req *protos.MirrorStatusRequest, ) (*protos.MirrorStatusResponse, error) { - slog.Info("Mirror status endpoint called", slog.String(string(shared.FlowNameKey), req.FlowJobName)) - cdcFlow, err := h.isCDCFlow(ctx, req.FlowJobName) + slog.Info("Mirror status endpoint called", + slog.Bool("includeFlowInfo", req.IncludeFlowInfo), + slog.String(string(shared.FlowNameKey), req.FlowJobName)) + + err := AuthenticateSyncRequest(ctx) if err != nil { - slog.Error("unable to query flow", slog.Any("error", err)) return &protos.MirrorStatusResponse{ - ErrorMessage: "unable to query flow: " + err.Error(), + FlowJobName: req.FlowJobName, + CurrentFlowState: protos.FlowStatus_STATUS_UNKNOWN, + ErrorMessage: "Unauthorized: invalid authorization token. 
Please check the token and try again.", + Ok: false, }, nil } workflowID, err := h.getWorkflowID(ctx, req.FlowJobName) if err != nil { return &protos.MirrorStatusResponse{ - ErrorMessage: "unable to get workflow ID " + err.Error(), + FlowJobName: req.FlowJobName, + CurrentFlowState: protos.FlowStatus_STATUS_UNKNOWN, + ErrorMessage: "unable to get the workflow ID of mirror " + req.FlowJobName, + Ok: false, }, nil } currState, err := h.getWorkflowStatus(ctx, workflowID) if err != nil { return &protos.MirrorStatusResponse{ - ErrorMessage: "unable to get workflow status " + err.Error(), + FlowJobName: req.FlowJobName, + CurrentFlowState: protos.FlowStatus_STATUS_UNKNOWN, + ErrorMessage: "unable to get the running status of mirror " + req.FlowJobName, + Ok: false, }, nil } - if cdcFlow { - cdcStatus, err := h.CDCFlowStatus(ctx, req) + if req.IncludeFlowInfo { + cdcFlow, err := h.isCDCFlow(ctx, req.FlowJobName) if err != nil { + slog.Error("unable to query flow", slog.Any("error", err)) return &protos.MirrorStatusResponse{ - ErrorMessage: "unable to query flow: " + err.Error(), + FlowJobName: req.FlowJobName, + CurrentFlowState: protos.FlowStatus_STATUS_UNKNOWN, + ErrorMessage: "unable to determine if mirror" + req.FlowJobName + "is of type CDC.", + Ok: false, }, nil } + if cdcFlow { + cdcStatus, err := h.CDCFlowStatus(ctx, req) + if err != nil { + return &protos.MirrorStatusResponse{ + FlowJobName: req.FlowJobName, + CurrentFlowState: protos.FlowStatus_STATUS_UNKNOWN, + ErrorMessage: "unable to obtain CDC information for mirror " + req.FlowJobName, + Ok: false, + }, nil + } - return &protos.MirrorStatusResponse{ - FlowJobName: req.FlowJobName, - Status: &protos.MirrorStatusResponse_CdcStatus{ - CdcStatus: cdcStatus, - }, - CurrentFlowState: currState, - }, nil - } else { - qrepStatus, err := h.QRepFlowStatus(ctx, req) - if err != nil { return &protos.MirrorStatusResponse{ - ErrorMessage: "unable to query flow: " + err.Error(), + FlowJobName: req.FlowJobName, + Status: &protos.MirrorStatusResponse_CdcStatus{ + CdcStatus: cdcStatus, + }, + CurrentFlowState: currState, + Ok: true, }, nil - } + } else { + qrepStatus, err := h.QRepFlowStatus(ctx, req) + if err != nil { + return &protos.MirrorStatusResponse{ + FlowJobName: req.FlowJobName, + CurrentFlowState: protos.FlowStatus_STATUS_UNKNOWN, + ErrorMessage: "unable to obtain snapshot information for mirror " + req.FlowJobName, + Ok: false, + }, nil + } - return &protos.MirrorStatusResponse{ - FlowJobName: req.FlowJobName, - Status: &protos.MirrorStatusResponse_QrepStatus{ - QrepStatus: qrepStatus, - }, - CurrentFlowState: currState, - }, nil + return &protos.MirrorStatusResponse{ + FlowJobName: req.FlowJobName, + Status: &protos.MirrorStatusResponse_QrepStatus{ + QrepStatus: qrepStatus, + }, + CurrentFlowState: currState, + Ok: true, + }, nil + } } + + return &protos.MirrorStatusResponse{ + FlowJobName: req.FlowJobName, + CurrentFlowState: currState, + Ok: true, + }, nil } func (h *FlowRequestHandler) CDCFlowStatus( @@ -407,51 +444,3 @@ func (h *FlowRequestHandler) getCDCWorkflowState(ctx context.Context, } return &state, nil } - -func (h *FlowRequestHandler) getRunningStatus( - ctx context.Context, - flowName string, -) (*protos.FlowStatus, error) { - workflowID, err := h.getWorkflowID(ctx, flowName) - if err != nil { - return nil, err - } - - currState, err := h.getWorkflowStatus(ctx, workflowID) - if err != nil { - return nil, err - } - - return &currState, nil -} - -func (h *FlowRequestHandler) CDCMirrorStatus(ctx context.Context, req 
*protos.MirrorStatusRequest) (*protos.CDCStatusResponse, error) { - err := AuthenticateSyncRequest(ctx) - if err != nil { - return &protos.CDCStatusResponse{ - FlowJobName: req.FlowJobName, - CurrentFlowState: protos.FlowStatus_STATUS_UNKNOWN, - ErrorMessage: "Unauthorized: invalid authorization token. Please check the token and try again.", - Ok: false, - }, nil - } - - cdcState, err := h.getRunningStatus(ctx, req.FlowJobName) - if err != nil { - slog.Error("Error fetching status of mirror", - slog.String(string(shared.FlowNameKey), req.FlowJobName), - slog.Any("error", err)) - return &protos.CDCStatusResponse{ - FlowJobName: req.FlowJobName, - CurrentFlowState: protos.FlowStatus_STATUS_UNKNOWN, - Ok: false, - ErrorMessage: fmt.Sprintf("Error fetching status of mirror %s: %v", req.FlowJobName, err)}, err - } - - return &protos.CDCStatusResponse{ - FlowJobName: req.FlowJobName, - CurrentFlowState: *cdcState, - Ok: true, - ErrorMessage: "", - }, nil -} diff --git a/protos/route.proto b/protos/route.proto index d9cf735382..6e84341399 100644 --- a/protos/route.proto +++ b/protos/route.proto @@ -91,6 +91,7 @@ message CreatePeerResponse { message MirrorStatusRequest { string flow_job_name = 1; + bool include_flow_info = 2; } message PartitionStatus { @@ -215,13 +216,7 @@ message MirrorStatusResponse { } string error_message = 4; peerdb_flow.FlowStatus current_flow_state = 5; -} - -message CDCStatusResponse { - string flow_job_name = 1; - peerdb_flow.FlowStatus current_flow_state = 2; - string error_message = 3; - bool ok = 4; + bool ok = 6; } message ValidateCDCMirrorResponse{ @@ -330,9 +325,6 @@ service FlowService { rpc MirrorStatus(MirrorStatusRequest) returns (MirrorStatusResponse) { option (google.api.http) = { get: "/v1/mirrors/{flow_job_name}" }; } - rpc CDCMirrorStatus(MirrorStatusRequest) returns (CDCStatusResponse) { - option (google.api.http) = { get: "/v1/flows/cdc/{flow_job_name}" }; - } rpc GetVersion(PeerDBVersionRequest) returns (PeerDBVersionResponse) { option (google.api.http) = { get: "/v1/version" }; diff --git a/ui/app/api/mirrors/state/route.ts b/ui/app/api/mirrors/state/route.ts index 5cb63ccfea..5042b50831 100644 --- a/ui/app/api/mirrors/state/route.ts +++ b/ui/app/api/mirrors/state/route.ts @@ -1,3 +1,4 @@ +import { GetHashedPeerDBPasswordFromEnv } from '@/app/utils/passwordFromEnv'; import { MirrorStatusRequest, MirrorStatusResponse, @@ -7,10 +8,17 @@ import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; export async function POST(request: Request) { const body: MirrorStatusRequest = await request.json(); const flowServiceAddr = GetFlowHttpAddressFromEnv(); + const authToken = GetHashedPeerDBPasswordFromEnv(); try { const res: MirrorStatusResponse = await fetch( - `${flowServiceAddr}/v1/mirrors/${body.flowJobName}`, - { cache: 'no-store' } + `${flowServiceAddr}/v1/mirrors/${body.flowJobName}?` + + new URLSearchParams({ + include_flow_info: 'true', + }), + { + cache: 'no-store', + headers: new Headers({ Authorization: `Bearer ${authToken}` }), + } ).then((res) => { return res.json(); }); diff --git a/ui/app/mirrors/[mirrorId]/page.tsx b/ui/app/mirrors/[mirrorId]/page.tsx index d7a78f9187..2331ce72ff 100644 --- a/ui/app/mirrors/[mirrorId]/page.tsx +++ b/ui/app/mirrors/[mirrorId]/page.tsx @@ -1,4 +1,5 @@ import { SyncStatusRow } from '@/app/dto/MirrorsDTO'; +import { GetHashedPeerDBPasswordFromEnv } from '@/app/utils/passwordFromEnv'; import prisma from '@/app/utils/prisma'; import MirrorActions from '@/components/MirrorActionsDropdown'; import { FlowConnectionConfigs, 
FlowStatus } from '@/grpc_generated/flow'; @@ -18,12 +19,16 @@ type EditMirrorProps = { function getMirrorStatusUrl(mirrorId: string) { let base = GetFlowHttpAddressFromEnv(); - return `${base}/v1/mirrors/${mirrorId}`; + return `${base}/v1/mirrors/${mirrorId}?include_flow_info=true`; } async function getMirrorStatus(mirrorId: string) { const url = getMirrorStatusUrl(mirrorId); - const resp = await fetch(url, { cache: 'no-store' }); + const hashedPassword = GetHashedPeerDBPasswordFromEnv(); + const resp = await fetch(url, { + cache: 'no-store', + headers: { Authorization: `Bearer ${hashedPassword}` }, + }); const json = await resp.json(); return json; } diff --git a/ui/app/utils/passwordFromEnv.ts b/ui/app/utils/passwordFromEnv.ts new file mode 100644 index 0000000000..5cea4db23c --- /dev/null +++ b/ui/app/utils/passwordFromEnv.ts @@ -0,0 +1,10 @@ +import { createHash } from 'crypto'; +import 'server-only'; + +export function GetHashedPeerDBPasswordFromEnv() { + const password = process.env.PEERDB_PASSWORD!; + const hash = createHash('sha256'); + hash.update(password); + const hashedPassword = hash.digest('hex'); + return hashedPassword; +} From aa0b33eba656a1791e92240e5b579ebe8465d3a9 Mon Sep 17 00:00:00 2001 From: Amogh-Bharadwaj Date: Wed, 29 May 2024 19:21:41 +0530 Subject: [PATCH 13/31] use bcrypt instead --- flow/cmd/custom_sync.go | 36 +-- ui/app/api/mirrors/state/route.ts | 4 +- ui/app/mirrors/[mirrorId]/page.tsx | 6 +- ui/app/utils/apitoken.ts | 12 + ui/app/utils/passwordFromEnv.ts | 10 - ui/package-lock.json | 389 +++++++++++++++++++++++++++-- ui/package.json | 2 + 7 files changed, 397 insertions(+), 62 deletions(-) create mode 100644 ui/app/utils/apitoken.ts delete mode 100644 ui/app/utils/passwordFromEnv.ts diff --git a/flow/cmd/custom_sync.go b/flow/cmd/custom_sync.go index 669c4986a8..93007ed2a3 100644 --- a/flow/cmd/custom_sync.go +++ b/flow/cmd/custom_sync.go @@ -2,46 +2,21 @@ package cmd import ( "context" - "encoding/hex" "errors" "fmt" "log/slog" - "sync" + "strings" "github.com/PeerDB-io/peer-flow/generated/protos" "github.com/PeerDB-io/peer-flow/peerdbenv" peerflow "github.com/PeerDB-io/peer-flow/workflows" - - "crypto/sha256" + "golang.org/x/crypto/bcrypt" "google.golang.org/grpc/metadata" ) const peerdbPauseGuideDocLink = "https://docs.peerdb.io/features/pause-mirror" -// Memoization map and mutex for storing and accessing hashed passwords -var ( - passwordHashes = make(map[string]string) - hashMutex sync.Mutex -) - -func getHashedPassword() string { - hashMutex.Lock() - defer hashMutex.Unlock() - - password := peerdbenv.PeerDBPassword() - if hashed, exists := passwordHashes[password]; exists { - return hashed - } - - hash := sha256.New() - hash.Write([]byte(password)) - - hashedPassword := hex.EncodeToString(hash.Sum(nil)) - passwordHashes[password] = hashedPassword - return hashedPassword -} - func AuthenticateSyncRequest(ctx context.Context) error { var values []string var token string @@ -55,10 +30,11 @@ func AuthenticateSyncRequest(ctx context.Context) error { token = values[0] } - passwordHashed := getHashedPassword() - if token != "Bearer "+passwordHashed { + password := peerdbenv.PeerDBPassword() + _, hashedKey, _ := strings.Cut(token, " ") + if bcrypt.CompareHashAndPassword([]byte(hashedKey), []byte(password)) != nil { slog.Error("Unauthorized: invalid authorization token") - return errors.New("unauthorized: invalid authorization token. Please check the token and try again.") + return errors.New("unauthorized: invalid authorization token. 
Please check the token and try again") } return nil diff --git a/ui/app/api/mirrors/state/route.ts b/ui/app/api/mirrors/state/route.ts index 5042b50831..ba5d7b0a03 100644 --- a/ui/app/api/mirrors/state/route.ts +++ b/ui/app/api/mirrors/state/route.ts @@ -1,4 +1,4 @@ -import { GetHashedPeerDBPasswordFromEnv } from '@/app/utils/passwordFromEnv'; +import { GetAPIToken } from '@/app/utils/apitoken'; import { MirrorStatusRequest, MirrorStatusResponse, @@ -8,7 +8,7 @@ import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; export async function POST(request: Request) { const body: MirrorStatusRequest = await request.json(); const flowServiceAddr = GetFlowHttpAddressFromEnv(); - const authToken = GetHashedPeerDBPasswordFromEnv(); + const authToken = GetAPIToken(); try { const res: MirrorStatusResponse = await fetch( `${flowServiceAddr}/v1/mirrors/${body.flowJobName}?` + diff --git a/ui/app/mirrors/[mirrorId]/page.tsx b/ui/app/mirrors/[mirrorId]/page.tsx index 2331ce72ff..bfdbc67c6e 100644 --- a/ui/app/mirrors/[mirrorId]/page.tsx +++ b/ui/app/mirrors/[mirrorId]/page.tsx @@ -1,5 +1,5 @@ import { SyncStatusRow } from '@/app/dto/MirrorsDTO'; -import { GetHashedPeerDBPasswordFromEnv } from '@/app/utils/passwordFromEnv'; +import { GetAPIToken } from '@/app/utils/apitoken'; import prisma from '@/app/utils/prisma'; import MirrorActions from '@/components/MirrorActionsDropdown'; import { FlowConnectionConfigs, FlowStatus } from '@/grpc_generated/flow'; @@ -24,10 +24,10 @@ function getMirrorStatusUrl(mirrorId: string) { async function getMirrorStatus(mirrorId: string) { const url = getMirrorStatusUrl(mirrorId); - const hashedPassword = GetHashedPeerDBPasswordFromEnv(); + const apiToken = GetAPIToken(); const resp = await fetch(url, { cache: 'no-store', - headers: { Authorization: `Bearer ${hashedPassword}` }, + headers: { Authorization: `Bearer ${apiToken}` }, }); const json = await resp.json(); return json; diff --git a/ui/app/utils/apitoken.ts b/ui/app/utils/apitoken.ts new file mode 100644 index 0000000000..bb27cf1ddf --- /dev/null +++ b/ui/app/utils/apitoken.ts @@ -0,0 +1,12 @@ +import 'server-only'; +import bcrypt from "bcrypt"; + +function hashPassword(password:string, salt:number) { + var hashed = bcrypt.hashSync(password, salt); // GOOD + return hashed; +} + +export function GetAPIToken() { + const password = process.env.PEERDB_PASSWORD!; + return hashPassword(password, 10); +} diff --git a/ui/app/utils/passwordFromEnv.ts b/ui/app/utils/passwordFromEnv.ts deleted file mode 100644 index 5cea4db23c..0000000000 --- a/ui/app/utils/passwordFromEnv.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { createHash } from 'crypto'; -import 'server-only'; - -export function GetHashedPeerDBPasswordFromEnv() { - const password = process.env.PEERDB_PASSWORD!; - const hash = createHash('sha256'); - hash.update(password); - const hashedPassword = hash.digest('hex'); - return hashedPassword; -} diff --git a/ui/package-lock.json b/ui/package-lock.json index 09adbf000f..f57bc77244 100644 --- a/ui/package-lock.json +++ b/ui/package-lock.json @@ -30,6 +30,7 @@ "@types/node": "^20.12.12", "@types/react": "^18.3.3", "@types/react-dom": "^18.3.0", + "bcrypt": "^5.1.1", "classnames": "^2.5.1", "long": "^5.2.3", "lucide-react": "^0.379.0", @@ -51,6 +52,7 @@ "zod": "^3.23.8" }, "devDependencies": { + "@types/bcrypt": "^5.0.2", "autoprefixer": "^10.4.19", "copy-webpack-plugin": "^12.0.2", "eslint": "^8.57.0", @@ -720,6 +722,25 @@ "url": "https://opencollective.com/js-sdsl" } }, + "node_modules/@mapbox/node-pre-gyp": { + "version": 
"1.0.11", + "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz", + "integrity": "sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ==", + "dependencies": { + "detect-libc": "^2.0.0", + "https-proxy-agent": "^5.0.0", + "make-dir": "^3.1.0", + "node-fetch": "^2.6.7", + "nopt": "^5.0.0", + "npmlog": "^5.0.1", + "rimraf": "^3.0.2", + "semver": "^7.3.5", + "tar": "^6.1.11" + }, + "bin": { + "node-pre-gyp": "bin/node-pre-gyp" + } + }, "node_modules/@monaco-editor/loader": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/@monaco-editor/loader/-/loader-1.4.0.tgz", @@ -2215,6 +2236,15 @@ "react-dom": ">=16.6.0" } }, + "node_modules/@types/bcrypt": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@types/bcrypt/-/bcrypt-5.0.2.tgz", + "integrity": "sha512-6atioO8Y75fNcbmj0G7UjI9lXN2pQ/IGJ2FWT4a/btd0Lk9lQalHLKhkgKVZ3r+spnmWUKfbMi1GEe9wyHQfNQ==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/d3-array": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.1.tgz", @@ -2723,6 +2753,11 @@ "dev": true, "license": "Apache-2.0" }, + "node_modules/abbrev": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==" + }, "node_modules/acorn": { "version": "8.11.3", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz", @@ -2756,6 +2791,17 @@ "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, + "node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, "node_modules/ajv": { "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", @@ -2868,6 +2914,24 @@ "node": ">= 8" } }, + "node_modules/aproba": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz", + "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==" + }, + "node_modules/are-we-there-yet": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz", + "integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==", + "deprecated": "This package is no longer supported.", + "dependencies": { + "delegates": "^1.0.0", + "readable-stream": "^3.6.0" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/arg": { "version": "5.0.2", "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", @@ -3200,6 +3264,19 @@ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "license": "MIT" }, + "node_modules/bcrypt": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/bcrypt/-/bcrypt-5.1.1.tgz", + "integrity": "sha512-AGBHOG5hPYZ5Xl9KXzU5iKq9516yEmvCKDg3ecP5kX2aB6UqTeXZxk2ELnDgDm6BQSMlLt9rDB4LoSMx0rYwww==", + "hasInstallScript": true, + "dependencies": { + "@mapbox/node-pre-gyp": "^1.0.11", + "node-addon-api": "^5.0.0" + }, + "engines": { + "node": ">= 10.0.0" + } + }, "node_modules/binary-extensions": { "version": "2.3.0", "resolved": 
"https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", @@ -3216,7 +3293,6 @@ "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", @@ -3406,6 +3482,14 @@ "node": ">= 6" } }, + "node_modules/chownr": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", + "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", + "engines": { + "node": ">=10" + } + }, "node_modules/chrome-trace-event": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz", @@ -3506,6 +3590,14 @@ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "license": "MIT" }, + "node_modules/color-support": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", + "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", + "bin": { + "color-support": "bin.js" + } + }, "node_modules/commander": { "version": "11.1.0", "resolved": "https://registry.npmjs.org/commander/-/commander-11.1.0.tgz", @@ -3527,9 +3619,13 @@ "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", - "dev": true, "license": "MIT" }, + "node_modules/console-control-strings": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", + "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==" + }, "node_modules/convert-source-map": { "version": "1.9.0", "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz", @@ -3856,7 +3952,6 @@ "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dev": true, "license": "MIT", "dependencies": { "ms": "2.1.2" @@ -3919,6 +4014,11 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/delegates": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", + "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==" + }, "node_modules/dequal": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", @@ -3929,6 +4029,14 @@ "node": ">=6" } }, + "node_modules/detect-libc": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz", + "integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==", + "engines": { + "node": ">=8" + } + }, "node_modules/detect-node-es": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz", @@ -4953,11 +5061,32 @@ "node": ">=14.14" } }, + "node_modules/fs-minipass": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", + "integrity": 
"sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/fs-minipass/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", - "dev": true, "license": "ISC" }, "node_modules/fsevents": { @@ -5012,6 +5141,49 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/gauge": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz", + "integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==", + "deprecated": "This package is no longer supported.", + "dependencies": { + "aproba": "^1.0.3 || ^2.0.0", + "color-support": "^1.1.2", + "console-control-strings": "^1.0.0", + "has-unicode": "^2.0.1", + "object-assign": "^4.1.1", + "signal-exit": "^3.0.0", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1", + "wide-align": "^1.1.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/gauge/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/gauge/node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==" + }, + "node_modules/gauge/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/get-caller-file": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", @@ -5389,6 +5561,11 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/has-unicode": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", + "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==" + }, "node_modules/hasown": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", @@ -5410,6 +5587,18 @@ "react-is": "^16.7.0" } }, + "node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/iconv-lite": { "version": "0.6.3", "resolved": 
"https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", @@ -5479,7 +5668,6 @@ "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", - "dev": true, "license": "ISC", "dependencies": { "once": "^1.3.0", @@ -5490,7 +5678,6 @@ "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true, "license": "ISC" }, "node_modules/internal-slot": { @@ -6327,7 +6514,6 @@ "version": "3.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", - "dev": true, "license": "MIT", "dependencies": { "semver": "^6.0.0" @@ -6343,7 +6529,6 @@ "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, "license": "ISC", "bin": { "semver": "bin/semver.js" @@ -6431,7 +6616,6 @@ "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, "license": "ISC", "dependencies": { "brace-expansion": "^1.1.7" @@ -6459,6 +6643,40 @@ "node": ">=16 || 14 >=14.17" } }, + "node_modules/minizlib": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", + "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "dependencies": { + "minipass": "^3.0.0", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/minizlib/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/moment": { "version": "2.30.1", "resolved": "https://registry.npmjs.org/moment/-/moment-2.30.1.tgz", @@ -6491,7 +6709,6 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true, "license": "MIT" }, "node_modules/mz": { @@ -6661,6 +6878,30 @@ "node": "^10 || ^12 || >=14" } }, + "node_modules/node-addon-api": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz", + "integrity": "sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==" + }, + "node_modules/node-fetch": { 
+ "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, "node_modules/node-releases": { "version": "2.0.14", "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz", @@ -6668,6 +6909,20 @@ "dev": true, "license": "MIT" }, + "node_modules/nopt": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", + "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", + "dependencies": { + "abbrev": "1" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/normalize-path": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", @@ -6687,6 +6942,18 @@ "node": ">=0.10.0" } }, + "node_modules/npmlog": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz", + "integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==", + "deprecated": "This package is no longer supported.", + "dependencies": { + "are-we-there-yet": "^2.0.0", + "console-control-strings": "^1.1.0", + "gauge": "^3.0.0", + "set-blocking": "^2.0.0" + } + }, "node_modules/oauth": { "version": "0.9.15", "resolved": "https://registry.npmjs.org/oauth/-/oauth-0.9.15.tgz", @@ -6848,7 +7115,6 @@ "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dev": true, "license": "ISC", "dependencies": { "wrappy": "1" @@ -6983,7 +7249,6 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", - "dev": true, "license": "MIT", "engines": { "node": ">=0.10.0" @@ -7725,6 +7990,19 @@ "pify": "^2.3.0" } }, + "node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/readdirp": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", @@ -7886,7 +8164,6 @@ "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", "deprecated": "Rimraf versions prior to v4 are no longer supported", - "dev": true, "license": "ISC", "dependencies": { "glob": "^7.1.3" @@ -7903,7 +8180,6 @@ "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", "deprecated": "Glob versions prior to v9 are no longer supported", - "dev": true, "license": "ISC", "dependencies": { "fs.realpath": "^1.0.0", 
@@ -7966,7 +8242,6 @@ "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "dev": true, "funding": [ { "type": "github", @@ -8087,7 +8362,6 @@ "version": "7.6.2", "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz", "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==", - "dev": true, "license": "ISC", "bin": { "semver": "bin/semver.js" @@ -8106,6 +8380,11 @@ "randombytes": "^2.1.0" } }, + "node_modules/set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==" + }, "node_modules/set-function-length": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", @@ -8255,6 +8534,14 @@ "node": ">=10.0.0" } }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, "node_modules/string-width": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.1.0.tgz", @@ -8694,6 +8981,30 @@ "node": ">=6" } }, + "node_modules/tar": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", + "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", + "dependencies": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^5.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/tar/node_modules/minipass": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", + "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "engines": { + "node": ">=8" + } + }, "node_modules/terser": { "version": "5.31.0", "resolved": "https://registry.npmjs.org/terser/-/terser-5.31.0.tgz", @@ -8829,6 +9140,11 @@ "node": ">=8.0" } }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" + }, "node_modules/trim-repeated": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/trim-repeated/-/trim-repeated-1.0.0.tgz", @@ -9225,6 +9541,11 @@ "node": ">=10.13.0" } }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" + }, "node_modules/webpack": { "version": "5.91.0", "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.91.0.tgz", @@ -9326,6 +9647,15 @@ "url": "https://opencollective.com/webpack" } }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": 
"sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", @@ -9424,6 +9754,32 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/wide-align": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", + "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", + "dependencies": { + "string-width": "^1.0.2 || 2 || 3 || 4" + } + }, + "node_modules/wide-align/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/wide-align/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/word-wrap": { "version": "1.2.5", "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", @@ -9549,7 +9905,6 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", - "dev": true, "license": "ISC" }, "node_modules/y18n": { diff --git a/ui/package.json b/ui/package.json index 7b85a1597f..1c1efade10 100644 --- a/ui/package.json +++ b/ui/package.json @@ -32,6 +32,7 @@ "@types/node": "^20.12.12", "@types/react": "^18.3.3", "@types/react-dom": "^18.3.0", + "bcrypt": "^5.1.1", "classnames": "^2.5.1", "long": "^5.2.3", "lucide-react": "^0.379.0", @@ -53,6 +54,7 @@ "zod": "^3.23.8" }, "devDependencies": { + "@types/bcrypt": "^5.0.2", "autoprefixer": "^10.4.19", "copy-webpack-plugin": "^12.0.2", "eslint": "^8.57.0", From 678e1ad2d73ac93cccacfee12dafd8f984d2a4b8 Mon Sep 17 00:00:00 2001 From: Amogh-Bharadwaj Date: Wed, 29 May 2024 19:43:57 +0530 Subject: [PATCH 14/31] fix import --- flow/cmd/custom_sync.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/flow/cmd/custom_sync.go b/flow/cmd/custom_sync.go index 93007ed2a3..3a872e9881 100644 --- a/flow/cmd/custom_sync.go +++ b/flow/cmd/custom_sync.go @@ -10,8 +10,8 @@ import ( "github.com/PeerDB-io/peer-flow/generated/protos" "github.com/PeerDB-io/peer-flow/peerdbenv" peerflow "github.com/PeerDB-io/peer-flow/workflows" - "golang.org/x/crypto/bcrypt" + "golang.org/x/crypto/bcrypt" "google.golang.org/grpc/metadata" ) From 87809a99ab7df746b2d2b7fdd595f982cad376ac Mon Sep 17 00:00:00 2001 From: Amogh-Bharadwaj Date: Wed, 29 May 2024 20:24:38 +0530 Subject: [PATCH 15/31] remove password from docker --- docker-compose-dev.yml | 2 +- docker-compose.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 166196c0f2..30defa24ff 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -131,7 +131,7 @@ services: - 8113:8113 environment: <<: [*catalog-config, *flow-worker-env, *minio-config] - PEERDB_PASSWORD: peerdb + 
PEERDB_PASSWORD: depends_on: temporal-admin-tools: condition: service_healthy diff --git a/docker-compose.yml b/docker-compose.yml index 154e4ba70d..a7ebd8da08 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -117,7 +117,7 @@ services: - 8113:8113 environment: <<: [*catalog-config, *flow-worker-env, *minio-config] - PEERDB_PASSWORD: peerdb + PEERDB_PASSWORD: depends_on: temporal-admin-tools: condition: service_healthy From f0d9909396f28033985e7b5f97c86d654398a023 Mon Sep 17 00:00:00 2001 From: Amogh-Bharadwaj Date: Thu, 30 May 2024 19:36:48 +0530 Subject: [PATCH 16/31] lint --- ui/app/utils/apitoken.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ui/app/utils/apitoken.ts b/ui/app/utils/apitoken.ts index bb27cf1ddf..5098e5316c 100644 --- a/ui/app/utils/apitoken.ts +++ b/ui/app/utils/apitoken.ts @@ -1,7 +1,7 @@ +import bcrypt from 'bcrypt'; import 'server-only'; -import bcrypt from "bcrypt"; -function hashPassword(password:string, salt:number) { +function hashPassword(password: string, salt: number) { var hashed = bcrypt.hashSync(password, salt); // GOOD return hashed; } From e5260b09ddd03b55d48ac3382fcff4a909655f5f Mon Sep 17 00:00:00 2001 From: Amogh-Bharadwaj Date: Thu, 30 May 2024 19:55:37 +0530 Subject: [PATCH 17/31] import fix attempt --- flow/cmd/custom_sync.go | 1 + 1 file changed, 1 insertion(+) diff --git a/flow/cmd/custom_sync.go b/flow/cmd/custom_sync.go index 3a872e9881..6b59a2d357 100644 --- a/flow/cmd/custom_sync.go +++ b/flow/cmd/custom_sync.go @@ -12,6 +12,7 @@ import ( peerflow "github.com/PeerDB-io/peer-flow/workflows" "golang.org/x/crypto/bcrypt" + "google.golang.org/grpc/metadata" ) From 18883e78ffe828e1cef11a13102a1fbf723e4d7f Mon Sep 17 00:00:00 2001 From: Amogh-Bharadwaj Date: Thu, 30 May 2024 19:56:21 +0530 Subject: [PATCH 18/31] import fix attempt 2 --- flow/cmd/custom_sync.go | 2 -- 1 file changed, 2 deletions(-) diff --git a/flow/cmd/custom_sync.go b/flow/cmd/custom_sync.go index 6b59a2d357..e990e769f4 100644 --- a/flow/cmd/custom_sync.go +++ b/flow/cmd/custom_sync.go @@ -10,9 +10,7 @@ import ( "github.com/PeerDB-io/peer-flow/generated/protos" "github.com/PeerDB-io/peer-flow/peerdbenv" peerflow "github.com/PeerDB-io/peer-flow/workflows" - "golang.org/x/crypto/bcrypt" - "google.golang.org/grpc/metadata" ) From b567929dfa7dc8c31fe7cb7b662d31c886d013c6 Mon Sep 17 00:00:00 2001 From: Kaushik Iska Date: Thu, 30 May 2024 10:32:48 -0400 Subject: [PATCH 19/31] fix lint --- flow/cmd/custom_sync.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/flow/cmd/custom_sync.go b/flow/cmd/custom_sync.go index e990e769f4..1c819f132a 100644 --- a/flow/cmd/custom_sync.go +++ b/flow/cmd/custom_sync.go @@ -7,11 +7,12 @@ import ( "log/slog" "strings" + "golang.org/x/crypto/bcrypt" + "google.golang.org/grpc/metadata" + "github.com/PeerDB-io/peer-flow/generated/protos" "github.com/PeerDB-io/peer-flow/peerdbenv" peerflow "github.com/PeerDB-io/peer-flow/workflows" - "golang.org/x/crypto/bcrypt" - "google.golang.org/grpc/metadata" ) const peerdbPauseGuideDocLink = "https://docs.peerdb.io/features/pause-mirror" From 664cc79a0ec8e72a1bd471b41de53dfaae45380a Mon Sep 17 00:00:00 2001 From: Kunal Gupta <39487888+iamKunalGupta@users.noreply.github.com> Date: Thu, 6 Jun 2024 10:17:48 +0530 Subject: [PATCH 20/31] feat(auth): add auth to all flow-api endpoints - remove auth from single endpoints and add auth everywhere - update all calls in ui to use the auth TODOs: - add auth to nexus - maybe look at x-forwarded-for for nexus --- 
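Note: after this patch every FlowService RPC except the gRPC health checks passes through the bcrypt-based Authorize middleware, and authentication is skipped only when PEERDB_PASSWORD is empty, so callers outside the bundled UI must supply their own token. Below is a minimal Go sketch of what a gRPC client could attach, assuming the "Bearer " + base64(bcrypt(password)) scheme from flow/middleware/auth.go; the package and helper names (clientauth, authContext) are illustrative only and not part of this patch.

package clientauth

import (
	"context"
	"encoding/base64"

	"golang.org/x/crypto/bcrypt"
	"google.golang.org/grpc/metadata"
)

// authContext returns an outgoing context carrying the bearer token the new
// auth interceptor expects: the base64-encoded bcrypt hash of the shared password.
func authContext(ctx context.Context, password string) (context.Context, error) {
	hash, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost)
	if err != nil {
		return nil, err
	}
	token := base64.StdEncoding.EncodeToString(hash)
	return metadata.AppendToOutgoingContext(ctx, "authorization", "Bearer "+token), nil
}

The resulting context can then be passed to any generated FlowServiceClient call.
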
flow/cmd/api.go | 12 +++- flow/cmd/custom_sync.go | 34 ---------- flow/cmd/mirror_status.go | 10 --- flow/middleware/auth.go | 45 +++++++++++++ flow/middleware/interceptors.go | 82 +++++++++++++++++++++++ ui/app/api/mirrors/cdc/route.ts | 22 +++--- ui/app/api/mirrors/cdc/validate/route.ts | 22 +++--- ui/app/api/mirrors/drop/route.ts | 22 +++--- ui/app/api/mirrors/qrep/route.ts | 22 +++--- ui/app/api/mirrors/state/route.ts | 32 ++++----- ui/app/api/mirrors/state_change/route.ts | 23 +++---- ui/app/api/peers/columns/route.ts | 20 +++--- ui/app/api/peers/drop/route.ts | 22 +++--- ui/app/api/peers/publications/route.ts | 21 +++--- ui/app/api/peers/route.ts | 39 +++++------ ui/app/api/peers/schemas/route.ts | 19 +++--- ui/app/api/peers/tables/all/route.ts | 18 ++--- ui/app/api/peers/tables/route.ts | 20 +++--- ui/app/api/version/route.ts | 23 ++++--- ui/app/mirrors/[mirrorId]/page.tsx | 27 +++++--- ui/app/mirrors/create/handlers.ts | 1 + ui/app/peers/[peerName]/page.tsx | 85 ++++++++++++++---------- ui/app/peers/page.tsx | 7 +- ui/package-lock.json | 74 ++++++++++++++++++++- ui/package.json | 1 + ui/rpc/http.ts | 35 ++++++++++ 26 files changed, 481 insertions(+), 257 deletions(-) create mode 100644 flow/middleware/auth.go create mode 100644 flow/middleware/interceptors.go diff --git a/flow/cmd/api.go b/flow/cmd/api.go index 5b010916db..1b68d60fe7 100644 --- a/flow/cmd/api.go +++ b/flow/cmd/api.go @@ -23,6 +23,7 @@ import ( "github.com/PeerDB-io/peer-flow/generated/protos" "github.com/PeerDB-io/peer-flow/logger" + "github.com/PeerDB-io/peer-flow/middleware" "github.com/PeerDB-io/peer-flow/peerdbenv" "github.com/PeerDB-io/peer-flow/shared" peerflow "github.com/PeerDB-io/peer-flow/workflows" @@ -115,7 +116,16 @@ func APIMain(ctx context.Context, args *APIServerParams) error { return fmt.Errorf("unable to create Temporal client: %w", err) } - grpcServer := grpc.NewServer() + healthMethods := []string{ + grpc_health_v1.Health_Check_FullMethodName, + grpc_health_v1.Health_Watch_FullMethodName, + } + grpcServer := grpc.NewServer( + grpc.ChainUnaryInterceptor( + middleware.CreateRequestLoggingInterceptor(healthMethods), + middleware.CreateAuthServerInterceptor(ctx, peerdbenv.PeerDBPassword(), healthMethods), + ), + ) catalogConn, err := peerdbenv.GetCatalogConnectionPoolFromEnv(ctx) if err != nil { diff --git a/flow/cmd/custom_sync.go b/flow/cmd/custom_sync.go index 1c819f132a..27a7d97e47 100644 --- a/flow/cmd/custom_sync.go +++ b/flow/cmd/custom_sync.go @@ -2,13 +2,8 @@ package cmd import ( "context" - "errors" "fmt" "log/slog" - "strings" - - "golang.org/x/crypto/bcrypt" - "google.golang.org/grpc/metadata" "github.com/PeerDB-io/peer-flow/generated/protos" "github.com/PeerDB-io/peer-flow/peerdbenv" @@ -17,29 +12,6 @@ import ( const peerdbPauseGuideDocLink = "https://docs.peerdb.io/features/pause-mirror" -func AuthenticateSyncRequest(ctx context.Context) error { - var values []string - var token string - - md, ok := metadata.FromIncomingContext(ctx) - if ok { - values = md.Get("authorization") - } - - if len(values) > 0 { - token = values[0] - } - - password := peerdbenv.PeerDBPassword() - _, hashedKey, _ := strings.Cut(token, " ") - if bcrypt.CompareHashAndPassword([]byte(hashedKey), []byte(password)) != nil { - slog.Error("Unauthorized: invalid authorization token") - return errors.New("unauthorized: invalid authorization token. 
Please check the token and try again") - } - - return nil -} - func (h *FlowRequestHandler) CustomSyncFlow( ctx context.Context, req *protos.CreateCustomSyncRequest, ) (*protos.CreateCustomSyncResponse, error) { @@ -49,13 +21,7 @@ func (h *FlowRequestHandler) CustomSyncFlow( ErrorMessage: "error while processing request", Ok: false, } - err := AuthenticateSyncRequest(ctx) - if err != nil { - errResponse.ErrorMessage = err.Error() - return errResponse, nil - } - // ---- REQUEST VALIDATION ---- if req.FlowJobName == "" { errResponse.ErrorMessage = "Mirror name cannot be empty." return errResponse, nil diff --git a/flow/cmd/mirror_status.go b/flow/cmd/mirror_status.go index 0cf5ab243f..411f855942 100644 --- a/flow/cmd/mirror_status.go +++ b/flow/cmd/mirror_status.go @@ -23,16 +23,6 @@ func (h *FlowRequestHandler) MirrorStatus( slog.Bool("includeFlowInfo", req.IncludeFlowInfo), slog.String(string(shared.FlowNameKey), req.FlowJobName)) - err := AuthenticateSyncRequest(ctx) - if err != nil { - return &protos.MirrorStatusResponse{ - FlowJobName: req.FlowJobName, - CurrentFlowState: protos.FlowStatus_STATUS_UNKNOWN, - ErrorMessage: "Unauthorized: invalid authorization token. Please check the token and try again.", - Ok: false, - }, nil - } - workflowID, err := h.getWorkflowID(ctx, req.FlowJobName) if err != nil { return &protos.MirrorStatusResponse{ diff --git a/flow/middleware/auth.go b/flow/middleware/auth.go new file mode 100644 index 0000000000..0acdecab6b --- /dev/null +++ b/flow/middleware/auth.go @@ -0,0 +1,45 @@ +package middleware + +import ( + "context" + "encoding/base64" + "log/slog" + "strings" + + "golang.org/x/crypto/bcrypt" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/status" + + "github.com/PeerDB-io/peer-flow/logger" +) + +// Authorize checks the authorization metadata and compares the incoming bearer token with the plaintext +func Authorize(ctx context.Context, plaintext string) (context.Context, error) { + md, _ := metadata.FromIncomingContext(ctx) + if len(md["authorization"]) == 0 { + return nil, status.Errorf(codes.Unauthenticated, "Authorization token is required") + } + headerValue := md["authorization"][0] + bearerPrefix := "Bearer " + if !strings.HasPrefix(headerValue, bearerPrefix) { + return nil, status.Errorf(codes.Unauthenticated, "Unsupported authorization type") + } + base64Token := strings.TrimPrefix(headerValue, bearerPrefix) + if base64Token == "" { + return nil, status.Errorf(codes.Unauthenticated, "Authorization token is required") + } + // Always a good practice to have the actual token in base64 + tokenBytes, err := base64.StdEncoding.DecodeString(base64Token) + if err != nil { + logger.LoggerFromCtx(ctx).Warn("Error decoding token", slog.String("token", base64Token), slog.Any("error", err)) + return nil, status.Errorf(codes.Unauthenticated, "Authentication failed") + } + token := string(tokenBytes) + err = bcrypt.CompareHashAndPassword([]byte(token), []byte(plaintext)) + if err != nil { + logger.LoggerFromCtx(ctx).Warn("Error validating token", slog.String("token", token), slog.Any("error", err)) + return nil, status.Errorf(codes.Unauthenticated, "Authentication failed") + } + return ctx, nil +} diff --git a/flow/middleware/interceptors.go b/flow/middleware/interceptors.go new file mode 100644 index 0000000000..76b7c6293a --- /dev/null +++ b/flow/middleware/interceptors.go @@ -0,0 +1,82 @@ +package middleware + +import ( + "context" + "log/slog" + "time" + + "google.golang.org/grpc" + 
"google.golang.org/grpc/peer" + "google.golang.org/grpc/status" + + "github.com/PeerDB-io/peer-flow/logger" +) + +func CreateAuthServerInterceptor(ctx context.Context, plaintext string, unauthenticatedMethods []string) grpc.UnaryServerInterceptor { + if plaintext == "" { + logger.LoggerFromCtx(ctx).Warn("Authentication is disabled") + //nolint:nonamedreturns + return func(ctx context.Context, req any, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (resp any, err error) { + return handler(ctx, req) + } + } + unauthenticatedMethodsMap := make(map[string]bool) + for _, method := range unauthenticatedMethods { + unauthenticatedMethodsMap[method] = true + } + //nolint:nonamedreturns + return func(ctx context.Context, req any, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (resp any, err error) { + if _, ok := unauthenticatedMethodsMap[info.FullMethod]; ok { + return handler(ctx, req) + } + ctx, err = Authorize(ctx, plaintext) + if err != nil { + return nil, err + } + return handler(ctx, req) + } +} + +// CreateRequestLoggingInterceptor logs all requests +// this is important for monitoring, debugging and auditing +func CreateRequestLoggingInterceptor(ignoredMethods []string) grpc.UnaryServerInterceptor { + ignoredMethodsMap := make(map[string]bool) + for _, method := range ignoredMethods { + ignoredMethodsMap[method] = true + } + //nolint:nonamedreturns + return func(ctx context.Context, req any, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (resp any, err error) { + if _, ok := ignoredMethodsMap[info.FullMethod]; ok { + return handler(ctx, req) + } + start := time.Now() + logger.LoggerFromCtx(ctx).Info( + "Received request", + slog.String("grpc.method", info.FullMethod), + ) + resp, err = handler(ctx, req) + var errorCode string + if err != nil { + // if error is a grpc error, extract the error code + if grpcErr, ok := status.FromError(err); ok { + errorCode = grpcErr.Code().String() + } + } + // TODO maybe also look at x-forwarded-for ? 
+ var clientIp string + if p, ok := peer.FromContext(ctx); ok { + clientIp = p.Addr.String() + } + + logger.LoggerFromCtx(ctx).Info( + "Request completed", + slog.String("grpc.method", info.FullMethod), + slog.Duration("duration", time.Since(start)), + slog.Float64("duration_seconds", time.Since(start).Seconds()), + slog.Any("error", err), + slog.String("grpc.code", errorCode), + slog.String("client_ip", clientIp), + ) + return resp, err + } +} diff --git a/ui/app/api/mirrors/cdc/route.ts b/ui/app/api/mirrors/cdc/route.ts index c7c7bde379..9f90e11db3 100644 --- a/ui/app/api/mirrors/cdc/route.ts +++ b/ui/app/api/mirrors/cdc/route.ts @@ -3,26 +3,23 @@ import { CreateCDCFlowRequest, CreateCDCFlowResponse, } from '@/grpc_generated/route'; -import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; +import { + GetFlowServiceHttpClient, + ParseFlowServiceErrorMessage, +} from '@/rpc/http'; export async function POST(request: Request) { const body = await request.json(); const { config } = body; - const flowServiceAddr = GetFlowHttpAddressFromEnv(); + const flowServiceClient = GetFlowServiceHttpClient(); const req: CreateCDCFlowRequest = { connectionConfigs: config, }; try { - const createStatus: CreateCDCFlowResponse = await fetch( - `${flowServiceAddr}/v1/flows/cdc/create`, - { - method: 'POST', - body: JSON.stringify(req), - } - ).then((res) => { - return res.json(); - }); + const createStatus = await flowServiceClient + .post(`/v1/flows/cdc/create`, req) + .then((res) => res.data); if (!createStatus.workflowId) { return new Response(JSON.stringify(createStatus)); @@ -33,6 +30,7 @@ export async function POST(request: Request) { return new Response(JSON.stringify(response)); } catch (e) { - console.log(e); + const message = ParseFlowServiceErrorMessage(e); + console.log(message, e); } } diff --git a/ui/app/api/mirrors/cdc/validate/route.ts b/ui/app/api/mirrors/cdc/validate/route.ts index 8354dbef88..aa4b567c4a 100644 --- a/ui/app/api/mirrors/cdc/validate/route.ts +++ b/ui/app/api/mirrors/cdc/validate/route.ts @@ -2,29 +2,27 @@ import { CreateCDCFlowRequest, ValidateCDCMirrorResponse, } from '@/grpc_generated/route'; -import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; +import { + GetFlowServiceHttpClient, + ParseFlowServiceErrorMessage, +} from '@/rpc/http'; import { NextRequest } from 'next/server'; export async function POST(request: NextRequest) { const body = await request.json(); const { config } = body; - const flowServiceAddr = GetFlowHttpAddressFromEnv(); + const flowServiceClient = GetFlowServiceHttpClient(); const req: CreateCDCFlowRequest = { connectionConfigs: config, }; try { - const validateResponse: ValidateCDCMirrorResponse = await fetch( - `${flowServiceAddr}/v1/mirrors/cdc/validate`, - { - method: 'POST', - body: JSON.stringify(req), - } - ).then((res) => { - return res.json(); - }); + const validateResponse: ValidateCDCMirrorResponse = await flowServiceClient + .post(`/v1/mirrors/cdc/validate`, req) + .then((res) => res.data); return new Response(JSON.stringify(validateResponse)); } catch (e) { - console.log(e); + const message = ParseFlowServiceErrorMessage(e); + console.log(message, e); } } diff --git a/ui/app/api/mirrors/drop/route.ts b/ui/app/api/mirrors/drop/route.ts index 205ab26fef..072d30cf20 100644 --- a/ui/app/api/mirrors/drop/route.ts +++ b/ui/app/api/mirrors/drop/route.ts @@ -1,11 +1,14 @@ import { UDropMirrorResponse } from '@/app/dto/MirrorsDTO'; import { ShutdownRequest, ShutdownResponse } from '@/grpc_generated/route'; -import { GetFlowHttpAddressFromEnv } 
from '@/rpc/http'; +import { + GetFlowServiceHttpClient, + ParseFlowServiceErrorMessage, +} from '@/rpc/http'; export async function POST(request: Request) { const body = await request.json(); const { workflowId, flowJobName, sourcePeer, destinationPeer } = body; - const flowServiceAddr = GetFlowHttpAddressFromEnv(); + const flowServiceClient = GetFlowServiceHttpClient(); const req: ShutdownRequest = { workflowId, flowJobName, @@ -15,15 +18,9 @@ export async function POST(request: Request) { }; try { - const dropStatus: ShutdownResponse = await fetch( - `${flowServiceAddr}/v1/mirrors/drop`, - { - method: 'POST', - body: JSON.stringify(req), - } - ).then((res) => { - return res.json(); - }); + const dropStatus: ShutdownResponse = await flowServiceClient + .post(`/v1/mirrors/drop`, req) + .then((res) => res.data); let response: UDropMirrorResponse = { dropped: dropStatus.ok, errorMessage: dropStatus.errorMessage, @@ -31,6 +28,7 @@ export async function POST(request: Request) { return new Response(JSON.stringify(response)); } catch (e) { - console.log(e); + const message = ParseFlowServiceErrorMessage(e); + console.error(message, e); } } diff --git a/ui/app/api/mirrors/qrep/route.ts b/ui/app/api/mirrors/qrep/route.ts index 7febf60140..8c9f8175d1 100644 --- a/ui/app/api/mirrors/qrep/route.ts +++ b/ui/app/api/mirrors/qrep/route.ts @@ -3,33 +3,31 @@ import { CreateQRepFlowRequest, CreateQRepFlowResponse, } from '@/grpc_generated/route'; -import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; +import { + GetFlowServiceHttpClient, + ParseFlowServiceErrorMessage, +} from '@/rpc/http'; export async function POST(request: Request) { const body = await request.json(); const { config } = body; - const flowServiceAddr = GetFlowHttpAddressFromEnv(); + const flowServiceClient = GetFlowServiceHttpClient(); const req: CreateQRepFlowRequest = { qrepConfig: config, createCatalogEntry: true, }; try { - const createStatus: CreateQRepFlowResponse = await fetch( - `${flowServiceAddr}/v1/flows/qrep/create`, - { - method: 'POST', - body: JSON.stringify(req), - } - ).then((res) => { - return res.json(); - }); + const createStatus: CreateQRepFlowResponse = await flowServiceClient + .post(`/v1/flows/qrep/create`, req) + .then((res) => res.data); let response: UCreateMirrorResponse = { created: !!createStatus.workflowId, }; return new Response(JSON.stringify(response)); } catch (e) { - console.log(e); + const message = ParseFlowServiceErrorMessage(e); + console.error(message, e); } } diff --git a/ui/app/api/mirrors/state/route.ts b/ui/app/api/mirrors/state/route.ts index ba5d7b0a03..b0cfdd9fe8 100644 --- a/ui/app/api/mirrors/state/route.ts +++ b/ui/app/api/mirrors/state/route.ts @@ -1,30 +1,30 @@ -import { GetAPIToken } from '@/app/utils/apitoken'; import { MirrorStatusRequest, MirrorStatusResponse, } from '@/grpc_generated/route'; -import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; +import { + GetFlowServiceHttpClient, + ParseFlowServiceErrorMessage, +} from '@/rpc/http'; export async function POST(request: Request) { const body: MirrorStatusRequest = await request.json(); - const flowServiceAddr = GetFlowHttpAddressFromEnv(); - const authToken = GetAPIToken(); + const flowServiceClient = GetFlowServiceHttpClient(); try { - const res: MirrorStatusResponse = await fetch( - `${flowServiceAddr}/v1/mirrors/${body.flowJobName}?` + - new URLSearchParams({ + const res: MirrorStatusResponse = await flowServiceClient + .get(`/v1/mirrors/${body.flowJobName}?`, { + params: { include_flow_info: 'true', - }), - { - cache: 
'no-store', - headers: new Headers({ Authorization: `Bearer ${authToken}` }), - } - ).then((res) => { - return res.json(); - }); + }, + headers: { + cache: 'no-store', + }, + }) + .then((res) => res.data); return new Response(JSON.stringify(res)); } catch (e) { - console.error(e); + const message = ParseFlowServiceErrorMessage(e); + console.error(message, e); } } diff --git a/ui/app/api/mirrors/state_change/route.ts b/ui/app/api/mirrors/state_change/route.ts index 0d81b75e7d..47d8fd645a 100644 --- a/ui/app/api/mirrors/state_change/route.ts +++ b/ui/app/api/mirrors/state_change/route.ts @@ -1,23 +1,20 @@ import { FlowStateChangeResponse } from '@/grpc_generated/route'; -import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; +import { + GetFlowServiceHttpClient, + ParseFlowServiceErrorMessage, +} from '@/rpc/http'; export async function POST(request: Request) { const body = await request.json(); - const flowServiceAddr = GetFlowHttpAddressFromEnv(); - + const flowServiceClient = GetFlowServiceHttpClient(); try { - const res: FlowStateChangeResponse = await fetch( - `${flowServiceAddr}/v1/mirrors/state_change`, - { - method: 'POST', - body: JSON.stringify(body), - } - ).then((res) => { - return res.json(); - }); + const res: FlowStateChangeResponse = await flowServiceClient + .post(`/v1/mirrors/state_change`, body) + .then((res) => res.data); return new Response(JSON.stringify(res)); } catch (e) { - console.error(e); + const message = ParseFlowServiceErrorMessage(e); + console.error(message, e); } } diff --git a/ui/app/api/peers/columns/route.ts b/ui/app/api/peers/columns/route.ts index 9f45db092e..9d49881e39 100644 --- a/ui/app/api/peers/columns/route.ts +++ b/ui/app/api/peers/columns/route.ts @@ -1,22 +1,26 @@ import { UColumnsResponse } from '@/app/dto/PeersDTO'; import { TableColumnsResponse } from '@/grpc_generated/route'; -import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; +import { + GetFlowServiceHttpClient, + ParseFlowServiceErrorMessage, +} from '@/rpc/http'; export async function POST(request: Request) { const body = await request.json(); const { peerName, schemaName, tableName } = body; - const flowServiceAddr = GetFlowHttpAddressFromEnv(); + const flowServiceClient = GetFlowServiceHttpClient(); try { - const columnsList: TableColumnsResponse = await fetch( - `${flowServiceAddr}/v1/peers/columns?peer_name=${peerName}&schema_name=${schemaName}&table_name=${tableName}` - ).then((res) => { - return res.json(); - }); + const columnsList: TableColumnsResponse = await flowServiceClient + .get( + `/v1/peers/columns?peer_name=${peerName}&schema_name=${schemaName}&table_name=${tableName}` + ) + .then((res) => res.data); let response: UColumnsResponse = { columns: columnsList.columns, }; return new Response(JSON.stringify(response)); } catch (e) { - console.log(e); + const message = ParseFlowServiceErrorMessage(e); + console.log(message, e); } } diff --git a/ui/app/api/peers/drop/route.ts b/ui/app/api/peers/drop/route.ts index 7449480033..e71fb11bd5 100644 --- a/ui/app/api/peers/drop/route.ts +++ b/ui/app/api/peers/drop/route.ts @@ -1,25 +1,22 @@ import { UDropPeerResponse } from '@/app/dto/PeersDTO'; import { DropPeerRequest, DropPeerResponse } from '@/grpc_generated/route'; -import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; +import { + GetFlowServiceHttpClient, + ParseFlowServiceErrorMessage, +} from '@/rpc/http'; export async function POST(request: Request) { const body = await request.json(); const { peerName } = body; - const flowServiceAddr = GetFlowHttpAddressFromEnv(); + 
const flowServiceClient = GetFlowServiceHttpClient(); const req: DropPeerRequest = { peerName, }; console.log('/drop/peer: req:', req); try { - const dropStatus: DropPeerResponse = await fetch( - `${flowServiceAddr}/v1/peers/drop`, - { - method: 'POST', - body: JSON.stringify(req), - } - ).then((res) => { - return res.json(); - }); + const dropStatus = await flowServiceClient + .post(`/v1/peers/drop`, req) + .then((res) => res.data); let response: UDropPeerResponse = { dropped: dropStatus.ok, errorMessage: dropStatus.errorMessage, @@ -27,6 +24,7 @@ export async function POST(request: Request) { return new Response(JSON.stringify(response)); } catch (e) { - console.log(e); + const message = ParseFlowServiceErrorMessage(e); + console.error(message, e); } } diff --git a/ui/app/api/peers/publications/route.ts b/ui/app/api/peers/publications/route.ts index 64dcd3dcf6..d7abddc189 100644 --- a/ui/app/api/peers/publications/route.ts +++ b/ui/app/api/peers/publications/route.ts @@ -1,22 +1,27 @@ import { UPublicationsResponse } from '@/app/dto/PeersDTO'; import { PeerPublicationsResponse } from '@/grpc_generated/route'; -import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; +import { + GetFlowServiceHttpClient, + ParseFlowServiceErrorMessage, +} from '@/rpc/http'; export async function POST(request: Request) { const body = await request.json(); const { peerName } = body; - const flowServiceAddr = GetFlowHttpAddressFromEnv(); + const flowServiceClient = GetFlowServiceHttpClient(); try { - const publicationList: PeerPublicationsResponse = await fetch( - `${flowServiceAddr}/v1/peers/publications?peer_name=${peerName}` - ).then((res) => { - return res.json(); - }); + const publicationList: PeerPublicationsResponse = await flowServiceClient + .get( + `/v1/peers/publications?peer_name=${peerName}` + ) + .then((res) => res.data); let response: UPublicationsResponse = { publicationNames: publicationList.publicationNames, }; + console.log(response); return new Response(JSON.stringify(response)); } catch (e) { - console.log(e); + const message = ParseFlowServiceErrorMessage(e); + console.log(message, e); } } diff --git a/ui/app/api/peers/route.ts b/ui/app/api/peers/route.ts index 4f468d8fb2..a55cc91c6a 100644 --- a/ui/app/api/peers/route.ts +++ b/ui/app/api/peers/route.ts @@ -29,7 +29,10 @@ import { createPeerStatusFromJSON, validatePeerStatusFromJSON, } from '@/grpc_generated/route'; -import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; +import { + GetFlowServiceHttpClient, + ParseFlowServiceErrorMessage, +} from '@/rpc/http'; const constructPeer = ( name: string, @@ -101,20 +104,16 @@ export const dynamic = 'force-dynamic'; export async function POST(request: Request) { const body = await request.json(); const { name, type, config, mode } = body; - const flowServiceAddr = GetFlowHttpAddressFromEnv(); + const flowServiceClient = GetFlowServiceHttpClient(); const peer = constructPeer(name, type, config); if (mode === 'validate') { const validateReq: ValidatePeerRequest = { peer }; try { - const validateStatus: ValidatePeerResponse = await fetch( - `${flowServiceAddr}/v1/peers/validate`, - { - method: 'POST', - body: JSON.stringify(validateReq), - } - ).then((res) => { - return res.json(); - }); + const validateStatus: ValidatePeerResponse = await flowServiceClient + .post(`/v1/peers/validate`, validateReq) + .then((res) => { + return res.data; + }); let response: UValidatePeerResponse = { valid: validatePeerStatusFromJSON(validateStatus.status) === @@ -123,20 +122,15 @@ export async function 
POST(request: Request) { }; return new Response(JSON.stringify(response)); } catch (error) { - console.error('Error validating peer:', error); + const message = ParseFlowServiceErrorMessage(error); + console.error('Error validating peer:', message, error); } } else if (mode === 'create') { const req: CreatePeerRequest = { peer }; try { - const createStatus: CreatePeerResponse = await fetch( - `${flowServiceAddr}/v1/peers/create`, - { - method: 'POST', - body: JSON.stringify(req), - } - ).then((res) => { - return res.json(); - }); + const createStatus = await flowServiceClient + .post(`/v1/peers/create`, req) + .then((res) => res.data); let response: UCreatePeerResponse = { created: createPeerStatusFromJSON(createStatus.status) === @@ -145,7 +139,8 @@ export async function POST(request: Request) { }; return new Response(JSON.stringify(response)); } catch (error) { - console.error('Error creating peer:', error); + const message = ParseFlowServiceErrorMessage(error); + console.error('Error creating peer:', message, error); } } } diff --git a/ui/app/api/peers/schemas/route.ts b/ui/app/api/peers/schemas/route.ts index 9a1c10856e..ebc15ad3cc 100644 --- a/ui/app/api/peers/schemas/route.ts +++ b/ui/app/api/peers/schemas/route.ts @@ -1,21 +1,24 @@ import { USchemasResponse } from '@/app/dto/PeersDTO'; -import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; +import { PeerSchemasResponse } from '@/grpc_generated/route'; +import { + GetFlowServiceHttpClient, + ParseFlowServiceErrorMessage, +} from '@/rpc/http'; export async function POST(request: Request) { const body = await request.json(); const { peerName } = body; - const flowServiceAddr = GetFlowHttpAddressFromEnv(); + const flowServiceClient = GetFlowServiceHttpClient(); try { - const schemaList = await fetch( - `${flowServiceAddr}/v1/peers/schemas?peer_name=${peerName}` - ).then((res) => { - return res.json(); - }); + const schemaList = await flowServiceClient + .get(`/v1/peers/schemas?peer_name=${peerName}`) + .then((res) => res.data); let response: USchemasResponse = { schemas: schemaList.schemas, }; return new Response(JSON.stringify(response)); } catch (e) { - console.log(e); + const message = ParseFlowServiceErrorMessage(e); + console.log(message, e); } } diff --git a/ui/app/api/peers/tables/all/route.ts b/ui/app/api/peers/tables/all/route.ts index 0281cc7067..09ae4b8d53 100644 --- a/ui/app/api/peers/tables/all/route.ts +++ b/ui/app/api/peers/tables/all/route.ts @@ -1,22 +1,24 @@ import { UTablesAllResponse } from '@/app/dto/PeersDTO'; import { AllTablesResponse } from '@/grpc_generated/route'; -import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; +import { + GetFlowServiceHttpClient, + ParseFlowServiceErrorMessage, +} from '@/rpc/http'; export async function POST(request: Request) { const body = await request.json(); const { peerName } = body; - const flowServiceAddr = GetFlowHttpAddressFromEnv(); + const flowServiceClient = GetFlowServiceHttpClient(); try { - const tableList: AllTablesResponse = await fetch( - `${flowServiceAddr}/v1/peers/tables/all?peer_name=${peerName}` - ).then((res) => { - return res.json(); - }); + const tableList: AllTablesResponse = await flowServiceClient + .get(`/v1/peers/tables/all?peer_name=${peerName}`) + .then((res) => res.data); let response: UTablesAllResponse = { tables: tableList.tables, }; return new Response(JSON.stringify(response)); } catch (e) { - console.log(e); + const message = ParseFlowServiceErrorMessage(e); + console.error(message, e); } } diff --git a/ui/app/api/peers/tables/route.ts 
b/ui/app/api/peers/tables/route.ts index b4c73500dd..9b5b4c7a20 100644 --- a/ui/app/api/peers/tables/route.ts +++ b/ui/app/api/peers/tables/route.ts @@ -1,22 +1,26 @@ import { UTablesResponse } from '@/app/dto/PeersDTO'; import { SchemaTablesResponse } from '@/grpc_generated/route'; -import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; +import { + GetFlowServiceHttpClient, + ParseFlowServiceErrorMessage, +} from '@/rpc/http'; export async function POST(request: Request) { const body = await request.json(); const { peerName, schemaName } = body; - const flowServiceAddr = GetFlowHttpAddressFromEnv(); + const flowServiceClient = GetFlowServiceHttpClient(); try { - const tableList: SchemaTablesResponse = await fetch( - `${flowServiceAddr}/v1/peers/tables?peer_name=${peerName}&schema_name=${schemaName}` - ).then((res) => { - return res.json(); - }); + const tableList: SchemaTablesResponse = await flowServiceClient + .get( + `/v1/peers/tables?peer_name=${peerName}&schema_name=${schemaName}` + ) + .then((res) => res.data); let response: UTablesResponse = { tables: tableList.tables, }; return new Response(JSON.stringify(response)); } catch (e) { - console.log(e); + const message = ParseFlowServiceErrorMessage(e); + console.log(message, e); } } diff --git a/ui/app/api/version/route.ts b/ui/app/api/version/route.ts index 1197ca2049..c62c1741c9 100644 --- a/ui/app/api/version/route.ts +++ b/ui/app/api/version/route.ts @@ -1,23 +1,24 @@ -import { UVersionResponse } from '@/app/dto/VersionDTO'; import { PeerDBVersionResponse } from '@/grpc_generated/route'; -import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; +import { + GetFlowServiceHttpClient, + ParseFlowServiceErrorMessage, +} from '@/rpc/http'; export const dynamic = 'force-dynamic'; export async function GET() { - const flowServiceAddr = GetFlowHttpAddressFromEnv(); + const flowServiceClient = GetFlowServiceHttpClient(); try { - const versionResponse: PeerDBVersionResponse = await fetch( - `${flowServiceAddr}/v1/version` - ).then((res) => { - return res.json(); - }); - let response: UVersionResponse = { + const versionResponse = await flowServiceClient + .get(`/v1/version`) + .then((res) => res.data); + let response = { version: versionResponse.version, }; return new Response(JSON.stringify(response)); } catch (error) { - console.error('Error getting version:', error); - return new Response(JSON.stringify({ error: error })); + const message = ParseFlowServiceErrorMessage(error); + console.error('Error getting version:', message); + return new Response(JSON.stringify({ error: message })); } } diff --git a/ui/app/mirrors/[mirrorId]/page.tsx b/ui/app/mirrors/[mirrorId]/page.tsx index bfdbc67c6e..0baf327fd5 100644 --- a/ui/app/mirrors/[mirrorId]/page.tsx +++ b/ui/app/mirrors/[mirrorId]/page.tsx @@ -7,7 +7,10 @@ import { DBType } from '@/grpc_generated/peers'; import { MirrorStatusResponse } from '@/grpc_generated/route'; import { Header } from '@/lib/Header'; import { LayoutMain } from '@/lib/Layout'; -import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; +import { + GetFlowServiceHttpClient, + ParseFlowServiceErrorMessage, +} from '@/rpc/http'; import { redirect } from 'next/navigation'; import { CDCMirror } from './cdc'; import NoMirror from './nomirror'; @@ -18,25 +21,29 @@ type EditMirrorProps = { }; function getMirrorStatusUrl(mirrorId: string) { - let base = GetFlowHttpAddressFromEnv(); - return `${base}/v1/mirrors/${mirrorId}?include_flow_info=true`; + return `/v1/mirrors/${mirrorId}?include_flow_info=true`; } async function 
getMirrorStatus(mirrorId: string) { const url = getMirrorStatusUrl(mirrorId); + const flowServiceClient = GetFlowServiceHttpClient(); const apiToken = GetAPIToken(); - const resp = await fetch(url, { - cache: 'no-store', - headers: { Authorization: `Bearer ${apiToken}` }, - }); - const json = await resp.json(); - return json; + try { + return await flowServiceClient + .get(url, { + headers: { cache: 'no-store' }, + }) + .then((res) => res.data); + } catch (e) { + const message = ParseFlowServiceErrorMessage(e); + console.error(message, e); + } } export default async function ViewMirror({ params: { mirrorId }, }: EditMirrorProps) { - const mirrorStatus: MirrorStatusResponse = await getMirrorStatus(mirrorId); + const mirrorStatus = await getMirrorStatus(mirrorId); if (!mirrorStatus) { return
No mirror status found!
; } diff --git a/ui/app/mirrors/create/handlers.ts b/ui/app/mirrors/create/handlers.ts index 5df96cfb76..2797839095 100644 --- a/ui/app/mirrors/create/handlers.ts +++ b/ui/app/mirrors/create/handlers.ts @@ -433,6 +433,7 @@ export const handleValidateCDC = async ( }; export const fetchPublications = async (peerName: string) => { + if (!peerName || peerName.length === 0) return []; const publicationsRes: UPublicationsResponse = await fetch( '/api/peers/publications', { diff --git a/ui/app/peers/[peerName]/page.tsx b/ui/app/peers/[peerName]/page.tsx index 5d5742c8cd..1dfecb1673 100644 --- a/ui/app/peers/[peerName]/page.tsx +++ b/ui/app/peers/[peerName]/page.tsx @@ -2,7 +2,10 @@ import { PeerInfo } from '@/components/PeerInfo'; import ReloadButton from '@/components/ReloadButton'; import { PeerSlotResponse, PeerStatResponse } from '@/grpc_generated/route'; import { Label } from '@/lib/Label'; -import { GetFlowHttpAddressFromEnv } from '@/rpc/http'; +import { + GetFlowServiceHttpClient, + ParseFlowServiceErrorMessage, +} from '@/rpc/http'; import LagGraph from './lagGraph'; import SlotTable from './slottable'; import StatTable from './stattable'; @@ -13,44 +16,58 @@ type DataConfigProps = { const PeerData = async ({ params: { peerName } }: DataConfigProps) => { const getSlotData = async () => { - const flowServiceAddr = GetFlowHttpAddressFromEnv(); + const flowServiceClient = GetFlowServiceHttpClient(); + try { + const peerSlots: PeerSlotResponse = await flowServiceClient + .get(`/v1/peers/slots/${peerName}`, { + headers: { + cache: 'no-store', + }, + }) + .then((res) => res.data); - const peerSlots: PeerSlotResponse = await fetch( - `${flowServiceAddr}/v1/peers/slots/${peerName}`, - { - cache: 'no-store', - } - ).then((res) => res.json()); - - const slotArray = peerSlots.slotData; - // slots with 'peerflow_slot' should come first - slotArray?.sort((slotA, slotB) => { - if ( - slotA.slotName.startsWith('peerflow_slot') && - !slotB.slotName.startsWith('peerflow_slot') - ) { - return -1; - } else if ( - !slotA.slotName.startsWith('peerflow_slot') && - slotB.slotName.startsWith('peerflow_slot') - ) { - return 1; - } else { - return 0; - } - }); - return slotArray; + const slotArray = peerSlots.slotData; + // slots with 'peerflow_slot' should come first + slotArray?.sort((slotA, slotB) => { + if ( + slotA.slotName.startsWith('peerflow_slot') && + !slotB.slotName.startsWith('peerflow_slot') + ) { + return -1; + } else if ( + !slotA.slotName.startsWith('peerflow_slot') && + slotB.slotName.startsWith('peerflow_slot') + ) { + return 1; + } else { + return 0; + } + }); + return slotArray; + } catch (e) { + const message = ParseFlowServiceErrorMessage(e); + console.error(message, e); + return []; + } }; const getStatData = async () => { - const flowServiceAddr = GetFlowHttpAddressFromEnv(); - - const peerStats: PeerStatResponse = await fetch( - `${flowServiceAddr}/v1/peers/stats/${peerName}`, - { cache: 'no-store' } - ).then((res) => res.json()); + const flowServiceClient = GetFlowServiceHttpClient(); + try { + const peerStats: PeerStatResponse = await flowServiceClient + .get(`/v1/peers/stats/${peerName}`, { + headers: { + cache: 'no-store', + }, + }) + .then((res) => res.data); - return peerStats.statData; + return peerStats.statData; + } catch (e) { + const message = ParseFlowServiceErrorMessage(e); + console.error(message, e); + return []; + } }; const slots = await getSlotData(); diff --git a/ui/app/peers/page.tsx b/ui/app/peers/page.tsx index 6ba3e68893..49b8f88e44 100644 --- 
a/ui/app/peers/page.tsx +++ b/ui/app/peers/page.tsx @@ -4,20 +4,19 @@ import { Icon } from '@/lib/Icon'; import { Label } from '@/lib/Label'; import { LayoutMain } from '@/lib/Layout'; import { Panel } from '@/lib/Panel'; +import { ProgressCircle } from '@/lib/ProgressCircle'; import Link from 'next/link'; import { Header } from '../../lib/Header'; import PeersTable from './peersTable'; -export const dynamic = 'force-dynamic'; - -import { ProgressCircle } from '@/lib/ProgressCircle'; import NewButton from '@/components/NewButton'; import useSWR from 'swr'; import { fetcher } from '../utils/swr'; +export const dynamic = 'force-dynamic'; + export default function Peers() { const { data: peers, error, isLoading } = useSWR('/api/peers', fetcher); - return ( diff --git a/ui/package-lock.json b/ui/package-lock.json index f57bc77244..f8d323577a 100644 --- a/ui/package-lock.json +++ b/ui/package-lock.json @@ -30,6 +30,7 @@ "@types/node": "^20.12.12", "@types/react": "^18.3.3", "@types/react-dom": "^18.3.0", + "axios": "^1.7.2", "bcrypt": "^5.1.1", "classnames": "^2.5.1", "long": "^5.2.3", @@ -3169,6 +3170,11 @@ "dev": true, "license": "MIT" }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, "node_modules/autoprefixer": { "version": "10.4.19", "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.19.tgz", @@ -3233,6 +3239,16 @@ "node": ">=4" } }, + "node_modules/axios": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.2.tgz", + "integrity": "sha512-2A8QhOMrbomlDuiLeK9XibIBzuHeRcqqNOHp0Cyp5EoJ1IFDh+XZH3A6BkXtv0K4gFGCI0Y4BM7B1wOEi0Rmgw==", + "dependencies": { + "follow-redirects": "^1.15.6", + "form-data": "^4.0.0", + "proxy-from-env": "^1.1.0" + } + }, "node_modules/axobject-query": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-3.2.1.tgz", @@ -3598,6 +3614,17 @@ "color-support": "bin.js" } }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, "node_modules/commander": { "version": "11.1.0", "resolved": "https://registry.npmjs.org/commander/-/commander-11.1.0.tgz", @@ -4014,6 +4041,14 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/delegates": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", @@ -5006,6 +5041,25 @@ "dev": true, "license": "ISC" }, + "node_modules/follow-redirects": { + "version": "1.15.6", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", + "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "engines": { + "node": ">=4.0" + }, + 
"peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, "node_modules/for-each": { "version": "0.3.3", "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", @@ -5032,6 +5086,19 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/fraction.js": { "version": "4.3.7", "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.7.tgz", @@ -6593,7 +6660,6 @@ "version": "1.52.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "dev": true, "license": "MIT", "engines": { "node": ">= 0.6" @@ -6603,7 +6669,6 @@ "version": "2.1.35", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "dev": true, "license": "MIT", "dependencies": { "mime-db": "1.52.0" @@ -7733,6 +7798,11 @@ "node": ">=12.0.0" } }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==" + }, "node_modules/prr": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz", diff --git a/ui/package.json b/ui/package.json index 1c1efade10..d6b4181291 100644 --- a/ui/package.json +++ b/ui/package.json @@ -32,6 +32,7 @@ "@types/node": "^20.12.12", "@types/react": "^18.3.3", "@types/react-dom": "^18.3.0", + "axios": "^1.7.2", "bcrypt": "^5.1.1", "classnames": "^2.5.1", "long": "^5.2.3", diff --git a/ui/rpc/http.ts b/ui/rpc/http.ts index c0d9b75542..1c1aedced9 100644 --- a/ui/rpc/http.ts +++ b/ui/rpc/http.ts @@ -1,5 +1,40 @@ +import { ServiceError } from '@grpc/grpc-js'; +import axios from 'axios'; +import bcrypt from 'bcrypt'; import 'server-only'; +function hashPassword(password: string, salt: number) { + return bcrypt.hashSync(password, salt); +} + +export function GetAPIToken() { + const password = process.env.PEERDB_PASSWORD!; + const hashedPassword = hashPassword(password, 10); + return Buffer.from(hashedPassword).toString('base64'); +} + export function GetFlowHttpAddressFromEnv() { return process.env.PEERDB_FLOW_SERVER_HTTP!; } + +const flowServiceHttpClient = axios.create({ + baseURL: GetFlowHttpAddressFromEnv(), + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${GetAPIToken()}`, + }, +}); + +export function GetFlowServiceHttpClient() { + return flowServiceHttpClient; +} + +export function ParseFlowServiceErrorMessage(error: any) { + if (axios.isAxiosError(error)) { + if (error.response && (error.response.data as ServiceError).code) { + return (error.response.data as ServiceError).message; + } + return error.response?.data || error.message; + } + return error; +} From adb7f64ea1074aa035c6724904ae143093af7184 Mon Sep 17 00:00:00 2001 From: Kunal Gupta <39487888+iamKunalGupta@users.noreply.github.com> Date: Thu, 6 Jun 2024 11:02:09 +0530 Subject: [PATCH 21/31] feat: add auth 
interceptor to nexus --- nexus/Cargo.lock | 25 +++++++++++++++++++++ nexus/flow-rs/Cargo.toml | 2 ++ nexus/flow-rs/src/grpc.rs | 46 +++++++++++++++++++++++++++++---------- nexus/server/src/main.rs | 5 ++++- 4 files changed, 66 insertions(+), 12 deletions(-) diff --git a/nexus/Cargo.lock b/nexus/Cargo.lock index bc3e85c526..af4f61a435 100644 --- a/nexus/Cargo.lock +++ b/nexus/Cargo.lock @@ -319,6 +319,19 @@ dependencies = [ "smallvec", ] +[[package]] +name = "bcrypt" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e65938ed058ef47d92cf8b346cc76ef48984572ade631927e9937b5ffc7662c7" +dependencies = [ + "base64 0.22.1", + "blowfish", + "getrandom", + "subtle", + "zeroize", +] + [[package]] name = "bindgen" version = "0.69.4" @@ -381,6 +394,16 @@ dependencies = [ "generic-array", ] +[[package]] +name = "blowfish" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e412e2cd0f2b2d93e02543ceae7917b3c70331573df19ee046bcbc35e45e87d7" +dependencies = [ + "byteorder", + "cipher", +] + [[package]] name = "borsh" version = "1.5.0" @@ -986,6 +1009,8 @@ name = "flow-rs" version = "0.1.0" dependencies = [ "anyhow", + "base64 0.22.1", + "bcrypt", "catalog", "pt", "serde_json", diff --git a/nexus/flow-rs/Cargo.toml b/nexus/flow-rs/Cargo.toml index 5ae9cda2d9..2f5edf72dd 100644 --- a/nexus/flow-rs/Cargo.toml +++ b/nexus/flow-rs/Cargo.toml @@ -10,3 +10,5 @@ tracing.workspace = true tonic-health = "0.11" pt = { path = "../pt" } catalog = { path = "../catalog" } +bcrypt = "0.15.1" +base64 = "0.22.1" diff --git a/nexus/flow-rs/src/grpc.rs b/nexus/flow-rs/src/grpc.rs index a07115c1ec..171510ba6f 100644 --- a/nexus/flow-rs/src/grpc.rs +++ b/nexus/flow-rs/src/grpc.rs @@ -1,11 +1,15 @@ +use std::str::FromStr; + +use base64::Engine; +use serde_json::Value; +use tonic_health::pb::health_client; + use catalog::WorkflowDetails; use pt::{ flow_model::{FlowJob, QRepFlowJob}, peerdb_flow::{QRepWriteMode, QRepWriteType, TypeSystem}, peerdb_route, tonic, }; -use serde_json::Value; -use tonic_health::pb::health_client; pub enum PeerValidationResult { Valid, @@ -13,13 +17,29 @@ pub enum PeerValidationResult { } pub struct FlowGrpcClient { - client: peerdb_route::flow_service_client::FlowServiceClient, + client: peerdb_route::flow_service_client::FlowServiceClient< + tonic::codegen::InterceptedService, + >, health_client: health_client::HealthClient, } +struct BearerAuthInterceptor { + token: String, +} + +impl tonic::service::Interceptor for BearerAuthInterceptor { + fn call(&mut self, mut request: tonic::Request<()>) -> Result, tonic::Status> { + request.metadata_mut().insert( + "authorization", + tonic::metadata::MetadataValue::from_str(format!("Bearer {0}", self.token).as_str()).unwrap(), + ); + Ok(request) + } +} + impl FlowGrpcClient { // create a new grpc client to the flow server using flow server address - pub async fn new(flow_server_addr: &str) -> anyhow::Result { + pub async fn new(flow_server_addr: &str, password: String) -> anyhow::Result { // change protocol to grpc let flow_server_addr = flow_server_addr.replace("http", "grpc"); @@ -30,12 +50,16 @@ impl FlowGrpcClient { // Create a gRPC channel let channel = tonic::transport::Channel::from_shared(grpc_endpoint.clone())?.connect_lazy(); - // construct a grpc client to the flow server - let client = peerdb_route::flow_service_client::FlowServiceClient::new(channel.clone()); + // Setup the token by hashing the password and base64 encoding it + let hashed_password = 
bcrypt::hash(password, bcrypt::DEFAULT_COST).unwrap(); + let token = base64::prelude::BASE64_STANDARD.encode(hashed_password.as_bytes()); - // construct a health client to the flow server, use the grpc endpoint - let health_client = health_client::HealthClient::new(channel); + // use the token in all requests + let interceptor = BearerAuthInterceptor { token }; + // construct a grpc client to the flow server with an interceptor + let client= peerdb_route::flow_service_client::FlowServiceClient::with_interceptor(channel.clone(), interceptor); + let health_client = health_client::HealthClient::new(channel); Ok(Self { client, health_client, @@ -267,9 +291,9 @@ impl FlowGrpcClient { } if !cfg.initial_copy_only { if let Some(QRepWriteMode { - write_type: wt, - upsert_key_columns: _, - }) = cfg.write_mode + write_type: wt, + upsert_key_columns: _, + }) = cfg.write_mode { if wt == QRepWriteType::QrepWriteModeOverwrite as i32 { return anyhow::Result::Err(anyhow::anyhow!( diff --git a/nexus/server/src/main.rs b/nexus/server/src/main.rs index c6787805b8..22c47ab8cf 100644 --- a/nexus/server/src/main.rs +++ b/nexus/server/src/main.rs @@ -1214,7 +1214,10 @@ pub async fn main() -> anyhow::Result<()> { // log that we accept mirror commands if we have a flow server let flow_handler = if let Some(ref addr) = args.flow_api_url { tracing::info!("MIRROR commands enabled"); - Some(Arc::new(Mutex::new(FlowGrpcClient::new(addr).await?))) + let password = args.peerdb_password.clone(); + Some(Arc::new(Mutex::new( + FlowGrpcClient::new(addr, password).await?, + ))) } else { tracing::info!("MIRROR commands disabled"); None From f1e5f4b202c04f1fdd308bdd696cae1512f2d203 Mon Sep 17 00:00:00 2001 From: Kunal Gupta <39487888+iamKunalGupta@users.noreply.github.com> Date: Thu, 6 Jun 2024 12:20:24 +0530 Subject: [PATCH 22/31] chore: small function param name change --- ui/rpc/http.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ui/rpc/http.ts b/ui/rpc/http.ts index 1c1aedced9..50a3a21625 100644 --- a/ui/rpc/http.ts +++ b/ui/rpc/http.ts @@ -3,8 +3,8 @@ import axios from 'axios'; import bcrypt from 'bcrypt'; import 'server-only'; -function hashPassword(password: string, salt: number) { - return bcrypt.hashSync(password, salt); +function hashPassword(password: string, rounds: number) { + return bcrypt.hashSync(password, rounds); } export function GetAPIToken() { From 6f0af22e36a0f7a8f2a5a5debb8be0d31183fc62 Mon Sep 17 00:00:00 2001 From: Kunal Gupta <39487888+iamKunalGupta@users.noreply.github.com> Date: Thu, 6 Jun 2024 12:41:26 +0530 Subject: [PATCH 23/31] fix(ui): client side token hash issue --- ui/app/mirrors/[mirrorId]/page.tsx | 2 -- ui/app/utils/apitoken.ts | 12 ------------ 2 files changed, 14 deletions(-) delete mode 100644 ui/app/utils/apitoken.ts diff --git a/ui/app/mirrors/[mirrorId]/page.tsx b/ui/app/mirrors/[mirrorId]/page.tsx index 0baf327fd5..ea02e0b5b4 100644 --- a/ui/app/mirrors/[mirrorId]/page.tsx +++ b/ui/app/mirrors/[mirrorId]/page.tsx @@ -1,5 +1,4 @@ import { SyncStatusRow } from '@/app/dto/MirrorsDTO'; -import { GetAPIToken } from '@/app/utils/apitoken'; import prisma from '@/app/utils/prisma'; import MirrorActions from '@/components/MirrorActionsDropdown'; import { FlowConnectionConfigs, FlowStatus } from '@/grpc_generated/flow'; @@ -27,7 +26,6 @@ function getMirrorStatusUrl(mirrorId: string) { async function getMirrorStatus(mirrorId: string) { const url = getMirrorStatusUrl(mirrorId); const flowServiceClient = GetFlowServiceHttpClient(); - const apiToken = GetAPIToken(); try { 
return await flowServiceClient .get(url, { diff --git a/ui/app/utils/apitoken.ts b/ui/app/utils/apitoken.ts deleted file mode 100644 index 5098e5316c..0000000000 --- a/ui/app/utils/apitoken.ts +++ /dev/null @@ -1,12 +0,0 @@ -import bcrypt from 'bcrypt'; -import 'server-only'; - -function hashPassword(password: string, salt: number) { - var hashed = bcrypt.hashSync(password, salt); // GOOD - return hashed; -} - -export function GetAPIToken() { - const password = process.env.PEERDB_PASSWORD!; - return hashPassword(password, 10); -} From ebdf444fbe993088a6a50bc6b234633c25788fa2 Mon Sep 17 00:00:00 2001 From: Kunal Gupta <39487888+iamKunalGupta@users.noreply.github.com> Date: Thu, 6 Jun 2024 13:04:10 +0530 Subject: [PATCH 24/31] fix(ui): separate token component to hopefully fix build failure --- ui/rpc/http.ts | 13 +------------ ui/rpc/token.ts | 12 ++++++++++++ 2 files changed, 13 insertions(+), 12 deletions(-) create mode 100644 ui/rpc/token.ts diff --git a/ui/rpc/http.ts b/ui/rpc/http.ts index 50a3a21625..265b6c05a9 100644 --- a/ui/rpc/http.ts +++ b/ui/rpc/http.ts @@ -1,17 +1,6 @@ +import { GetAPIToken } from '@/rpc/token'; import { ServiceError } from '@grpc/grpc-js'; import axios from 'axios'; -import bcrypt from 'bcrypt'; -import 'server-only'; - -function hashPassword(password: string, rounds: number) { - return bcrypt.hashSync(password, rounds); -} - -export function GetAPIToken() { - const password = process.env.PEERDB_PASSWORD!; - const hashedPassword = hashPassword(password, 10); - return Buffer.from(hashedPassword).toString('base64'); -} export function GetFlowHttpAddressFromEnv() { return process.env.PEERDB_FLOW_SERVER_HTTP!; diff --git a/ui/rpc/token.ts b/ui/rpc/token.ts new file mode 100644 index 0000000000..a5bb88acc3 --- /dev/null +++ b/ui/rpc/token.ts @@ -0,0 +1,12 @@ +import bcrypt from 'bcrypt'; +import 'server-only'; + +function hashPassword(password: string, rounds: number) { + return bcrypt.hashSync(password, rounds); +} + +export function GetAPIToken() { + const password = process.env.PEERDB_PASSWORD!; + const hashedPassword = hashPassword(password, 10); + return Buffer.from(hashedPassword).toString('base64'); +} From c3c0847ce75c373fb43de8b4b08e1264278e2a68 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Philip=20Dub=C3=A9?= Date: Thu, 6 Jun 2024 18:48:34 +0000 Subject: [PATCH 25/31] feedback --- flow/middleware/auth.go | 14 +++++--------- flow/middleware/interceptors.go | 25 +++++++++++-------------- 2 files changed, 16 insertions(+), 23 deletions(-) diff --git a/flow/middleware/auth.go b/flow/middleware/auth.go index 0acdecab6b..2f513d083e 100644 --- a/flow/middleware/auth.go +++ b/flow/middleware/auth.go @@ -21,12 +21,10 @@ func Authorize(ctx context.Context, plaintext string) (context.Context, error) { return nil, status.Errorf(codes.Unauthenticated, "Authorization token is required") } headerValue := md["authorization"][0] - bearerPrefix := "Bearer " - if !strings.HasPrefix(headerValue, bearerPrefix) { + base64Token, hasPrefix := strings.CutPrefix(headerValue, "Bearer ") + if !hasPrefix { return nil, status.Errorf(codes.Unauthenticated, "Unsupported authorization type") - } - base64Token := strings.TrimPrefix(headerValue, bearerPrefix) - if base64Token == "" { + } else if base64Token == "" { return nil, status.Errorf(codes.Unauthenticated, "Authorization token is required") } // Always a good practice to have the actual token in base64 @@ -35,10 +33,8 @@ func Authorize(ctx context.Context, plaintext string) (context.Context, error) { 
logger.LoggerFromCtx(ctx).Warn("Error decoding token", slog.String("token", base64Token), slog.Any("error", err)) return nil, status.Errorf(codes.Unauthenticated, "Authentication failed") } - token := string(tokenBytes) - err = bcrypt.CompareHashAndPassword([]byte(token), []byte(plaintext)) - if err != nil { - logger.LoggerFromCtx(ctx).Warn("Error validating token", slog.String("token", token), slog.Any("error", err)) + if err := bcrypt.CompareHashAndPassword(tokenBytes, []byte(plaintext)); err != nil { + logger.LoggerFromCtx(ctx).Warn("Error validating token", slog.String("token", string(tokenBytes)), slog.Any("error", err)) return nil, status.Errorf(codes.Unauthenticated, "Authentication failed") } return ctx, nil diff --git a/flow/middleware/interceptors.go b/flow/middleware/interceptors.go index 76b7c6293a..ebaf15ec35 100644 --- a/flow/middleware/interceptors.go +++ b/flow/middleware/interceptors.go @@ -15,21 +15,19 @@ import ( func CreateAuthServerInterceptor(ctx context.Context, plaintext string, unauthenticatedMethods []string) grpc.UnaryServerInterceptor { if plaintext == "" { logger.LoggerFromCtx(ctx).Warn("Authentication is disabled") - //nolint:nonamedreturns - return func(ctx context.Context, req any, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (resp any, err error) { + return func(ctx context.Context, req any, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (any, error) { return handler(ctx, req) } } - unauthenticatedMethodsMap := make(map[string]bool) + unauthenticatedMethodsSet := make(map[string]struct{}) for _, method := range unauthenticatedMethods { - unauthenticatedMethodsMap[method] = true + unauthenticatedMethodsSet[method] = struct{}{} } - //nolint:nonamedreturns - return func(ctx context.Context, req any, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (resp any, err error) { - if _, ok := unauthenticatedMethodsMap[info.FullMethod]; ok { + return func(ctx context.Context, req any, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (any, error) { + if _, ok := unauthenticatedMethodsSet[info.FullMethod]; ok { return handler(ctx, req) } - ctx, err = Authorize(ctx, plaintext) + ctx, err := Authorize(ctx, plaintext) if err != nil { return nil, err } @@ -40,13 +38,12 @@ func CreateAuthServerInterceptor(ctx context.Context, plaintext string, unauthen // CreateRequestLoggingInterceptor logs all requests // this is important for monitoring, debugging and auditing func CreateRequestLoggingInterceptor(ignoredMethods []string) grpc.UnaryServerInterceptor { - ignoredMethodsMap := make(map[string]bool) + ignoredMethodsSet := make(map[string]struct{}) for _, method := range ignoredMethods { - ignoredMethodsMap[method] = true + ignoredMethodsSet[method] = struct{}{} } - //nolint:nonamedreturns - return func(ctx context.Context, req any, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (resp any, err error) { - if _, ok := ignoredMethodsMap[info.FullMethod]; ok { + return func(ctx context.Context, req any, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (any, error) { + if _, ok := ignoredMethodsSet[info.FullMethod]; ok { return handler(ctx, req) } start := time.Now() @@ -54,7 +51,7 @@ func CreateRequestLoggingInterceptor(ignoredMethods []string) grpc.UnaryServerIn "Received request", slog.String("grpc.method", info.FullMethod), ) - resp, err = handler(ctx, req) + resp, err := handler(ctx, req) var errorCode string if err != nil { // if error is a grpc error, extract the error code From 682f830e30eaac861e9ee4af3b32d0978e2e6812 Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Philip=20Dub=C3=A9?= Date: Thu, 6 Jun 2024 19:50:11 +0000 Subject: [PATCH 26/31] No axios --- docker-compose-dev.yml | 1 - docker-compose.yml | 1 - ui/app/api/mirrors/cdc/route.ts | 15 ++--- ui/app/api/mirrors/cdc/validate/route.ts | 11 ++-- ui/app/api/mirrors/drop/route.ts | 7 +-- ui/app/api/mirrors/qrep/route.ts | 10 ++-- ui/app/api/mirrors/state/route.ts | 31 ++-------- ui/app/api/mirrors/state_change/route.ts | 11 ++-- ui/app/api/peers/columns/route.ts | 9 ++- ui/app/api/peers/drop/route.ts | 9 +-- ui/app/api/peers/publications/route.ts | 9 ++- ui/app/api/peers/route.ts | 13 +++-- ui/app/api/peers/schemas/route.ts | 8 +-- ui/app/api/peers/tables/all/route.ts | 9 +-- ui/app/api/peers/tables/route.ts | 9 ++- ui/app/api/version/route.ts | 7 +-- ui/app/mirrors/[mirrorId]/page.tsx | 10 ++-- ui/app/peers/[peerName]/page.tsx | 30 ++++------ ui/package-lock.json | 74 +----------------------- ui/package.json | 1 - ui/rpc/http.ts | 58 ++++++++++++++----- ui/rpc/token.ts | 2 +- 22 files changed, 131 insertions(+), 204 deletions(-) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 30defa24ff..1868c755bf 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -131,7 +131,6 @@ services: - 8113:8113 environment: <<: [*catalog-config, *flow-worker-env, *minio-config] - PEERDB_PASSWORD: depends_on: temporal-admin-tools: condition: service_healthy diff --git a/docker-compose.yml b/docker-compose.yml index a7ebd8da08..1b1617233d 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -117,7 +117,6 @@ services: - 8113:8113 environment: <<: [*catalog-config, *flow-worker-env, *minio-config] - PEERDB_PASSWORD: depends_on: temporal-admin-tools: condition: service_healthy diff --git a/ui/app/api/mirrors/cdc/route.ts b/ui/app/api/mirrors/cdc/route.ts index 9f90e11db3..dfdcda2ade 100644 --- a/ui/app/api/mirrors/cdc/route.ts +++ b/ui/app/api/mirrors/cdc/route.ts @@ -1,8 +1,5 @@ import { UCreateMirrorResponse } from '@/app/dto/MirrorsDTO'; -import { - CreateCDCFlowRequest, - CreateCDCFlowResponse, -} from '@/grpc_generated/route'; +import { CreateCDCFlowRequest } from '@/grpc_generated/route'; import { GetFlowServiceHttpClient, ParseFlowServiceErrorMessage, @@ -17,10 +14,10 @@ export async function POST(request: Request) { connectionConfigs: config, }; try { - const createStatus = await flowServiceClient - .post(`/v1/flows/cdc/create`, req) - .then((res) => res.data); - + const createStatus = await flowServiceClient.post( + `/v1/flows/cdc/create`, + req + ); if (!createStatus.workflowId) { return new Response(JSON.stringify(createStatus)); } @@ -30,7 +27,7 @@ export async function POST(request: Request) { return new Response(JSON.stringify(response)); } catch (e) { - const message = ParseFlowServiceErrorMessage(e); + const message = await ParseFlowServiceErrorMessage(e); console.log(message, e); } } diff --git a/ui/app/api/mirrors/cdc/validate/route.ts b/ui/app/api/mirrors/cdc/validate/route.ts index aa4b567c4a..7e57f949f8 100644 --- a/ui/app/api/mirrors/cdc/validate/route.ts +++ b/ui/app/api/mirrors/cdc/validate/route.ts @@ -16,13 +16,14 @@ export async function POST(request: NextRequest) { connectionConfigs: config, }; try { - const validateResponse: ValidateCDCMirrorResponse = await flowServiceClient - .post(`/v1/mirrors/cdc/validate`, req) - .then((res) => res.data); - + const validateResponse: ValidateCDCMirrorResponse = + await flowServiceClient.post( + `/v1/mirrors/cdc/validate`, + req + ); return new Response(JSON.stringify(validateResponse)); 
} catch (e) { - const message = ParseFlowServiceErrorMessage(e); + const message = await ParseFlowServiceErrorMessage(e); console.log(message, e); } } diff --git a/ui/app/api/mirrors/drop/route.ts b/ui/app/api/mirrors/drop/route.ts index 072d30cf20..a16fc52d4e 100644 --- a/ui/app/api/mirrors/drop/route.ts +++ b/ui/app/api/mirrors/drop/route.ts @@ -18,9 +18,8 @@ export async function POST(request: Request) { }; try { - const dropStatus: ShutdownResponse = await flowServiceClient - .post(`/v1/mirrors/drop`, req) - .then((res) => res.data); + const dropStatus: ShutdownResponse = + await flowServiceClient.post(`/v1/mirrors/drop`, req); let response: UDropMirrorResponse = { dropped: dropStatus.ok, errorMessage: dropStatus.errorMessage, @@ -28,7 +27,7 @@ export async function POST(request: Request) { return new Response(JSON.stringify(response)); } catch (e) { - const message = ParseFlowServiceErrorMessage(e); + const message = await ParseFlowServiceErrorMessage(e); console.error(message, e); } } diff --git a/ui/app/api/mirrors/qrep/route.ts b/ui/app/api/mirrors/qrep/route.ts index 8c9f8175d1..ac605ebed8 100644 --- a/ui/app/api/mirrors/qrep/route.ts +++ b/ui/app/api/mirrors/qrep/route.ts @@ -18,16 +18,18 @@ export async function POST(request: Request) { createCatalogEntry: true, }; try { - const createStatus: CreateQRepFlowResponse = await flowServiceClient - .post(`/v1/flows/qrep/create`, req) - .then((res) => res.data); + const createStatus: CreateQRepFlowResponse = + await flowServiceClient.post( + `/v1/flows/qrep/create`, + req + ); let response: UCreateMirrorResponse = { created: !!createStatus.workflowId, }; return new Response(JSON.stringify(response)); } catch (e) { - const message = ParseFlowServiceErrorMessage(e); + const message = await ParseFlowServiceErrorMessage(e); console.error(message, e); } } diff --git a/ui/app/api/mirrors/state/route.ts b/ui/app/api/mirrors/state/route.ts index b0cfdd9fe8..3eaf305fab 100644 --- a/ui/app/api/mirrors/state/route.ts +++ b/ui/app/api/mirrors/state/route.ts @@ -1,30 +1,11 @@ -import { - MirrorStatusRequest, - MirrorStatusResponse, -} from '@/grpc_generated/route'; -import { - GetFlowServiceHttpClient, - ParseFlowServiceErrorMessage, -} from '@/rpc/http'; +import { MirrorStatusRequest } from '@/grpc_generated/route'; +import { GetFlowServiceHttpClient } from '@/rpc/http'; export async function POST(request: Request) { const body: MirrorStatusRequest = await request.json(); const flowServiceClient = GetFlowServiceHttpClient(); - try { - const res: MirrorStatusResponse = await flowServiceClient - .get(`/v1/mirrors/${body.flowJobName}?`, { - params: { - include_flow_info: 'true', - }, - headers: { - cache: 'no-store', - }, - }) - .then((res) => res.data); - - return new Response(JSON.stringify(res)); - } catch (e) { - const message = ParseFlowServiceErrorMessage(e); - console.error(message, e); - } + return flowServiceClient.raw( + `/v1/mirrors/${body.flowJobName}?include_flow_info=true`, + { cache: 'no-store' } + ); } diff --git a/ui/app/api/mirrors/state_change/route.ts b/ui/app/api/mirrors/state_change/route.ts index 47d8fd645a..21e18a512f 100644 --- a/ui/app/api/mirrors/state_change/route.ts +++ b/ui/app/api/mirrors/state_change/route.ts @@ -8,13 +8,14 @@ export async function POST(request: Request) { const body = await request.json(); const flowServiceClient = GetFlowServiceHttpClient(); try { - const res: FlowStateChangeResponse = await flowServiceClient - .post(`/v1/mirrors/state_change`, body) - .then((res) => res.data); - + const res: 
FlowStateChangeResponse = + await flowServiceClient.post( + `/v1/mirrors/state_change`, + body + ); return new Response(JSON.stringify(res)); } catch (e) { - const message = ParseFlowServiceErrorMessage(e); + const message = await ParseFlowServiceErrorMessage(e); console.error(message, e); } } diff --git a/ui/app/api/peers/columns/route.ts b/ui/app/api/peers/columns/route.ts index 9d49881e39..c32e8611b9 100644 --- a/ui/app/api/peers/columns/route.ts +++ b/ui/app/api/peers/columns/route.ts @@ -10,17 +10,16 @@ export async function POST(request: Request) { const { peerName, schemaName, tableName } = body; const flowServiceClient = GetFlowServiceHttpClient(); try { - const columnsList: TableColumnsResponse = await flowServiceClient - .get( + const columnsList: TableColumnsResponse = + await flowServiceClient.get( `/v1/peers/columns?peer_name=${peerName}&schema_name=${schemaName}&table_name=${tableName}` - ) - .then((res) => res.data); + ); let response: UColumnsResponse = { columns: columnsList.columns, }; return new Response(JSON.stringify(response)); } catch (e) { - const message = ParseFlowServiceErrorMessage(e); + const message = await ParseFlowServiceErrorMessage(e); console.log(message, e); } } diff --git a/ui/app/api/peers/drop/route.ts b/ui/app/api/peers/drop/route.ts index e71fb11bd5..a7b4fe234f 100644 --- a/ui/app/api/peers/drop/route.ts +++ b/ui/app/api/peers/drop/route.ts @@ -14,9 +14,10 @@ export async function POST(request: Request) { }; console.log('/drop/peer: req:', req); try { - const dropStatus = await flowServiceClient - .post(`/v1/peers/drop`, req) - .then((res) => res.data); + const dropStatus = await flowServiceClient.post( + `/v1/peers/drop`, + req + ); let response: UDropPeerResponse = { dropped: dropStatus.ok, errorMessage: dropStatus.errorMessage, @@ -24,7 +25,7 @@ export async function POST(request: Request) { return new Response(JSON.stringify(response)); } catch (e) { - const message = ParseFlowServiceErrorMessage(e); + const message = await ParseFlowServiceErrorMessage(e); console.error(message, e); } } diff --git a/ui/app/api/peers/publications/route.ts b/ui/app/api/peers/publications/route.ts index d7abddc189..e381796d5a 100644 --- a/ui/app/api/peers/publications/route.ts +++ b/ui/app/api/peers/publications/route.ts @@ -10,18 +10,17 @@ export async function POST(request: Request) { const { peerName } = body; const flowServiceClient = GetFlowServiceHttpClient(); try { - const publicationList: PeerPublicationsResponse = await flowServiceClient - .get( + const publicationList: PeerPublicationsResponse = + await flowServiceClient.get( `/v1/peers/publications?peer_name=${peerName}` - ) - .then((res) => res.data); + ); let response: UPublicationsResponse = { publicationNames: publicationList.publicationNames, }; console.log(response); return new Response(JSON.stringify(response)); } catch (e) { - const message = ParseFlowServiceErrorMessage(e); + const message = await ParseFlowServiceErrorMessage(e); console.log(message, e); } } diff --git a/ui/app/api/peers/route.ts b/ui/app/api/peers/route.ts index a55cc91c6a..48cadb0943 100644 --- a/ui/app/api/peers/route.ts +++ b/ui/app/api/peers/route.ts @@ -110,7 +110,7 @@ export async function POST(request: Request) { const validateReq: ValidatePeerRequest = { peer }; try { const validateStatus: ValidatePeerResponse = await flowServiceClient - .post(`/v1/peers/validate`, validateReq) + .post(`/v1/peers/validate`, validateReq) .then((res) => { return res.data; }); @@ -122,15 +122,16 @@ export async function POST(request: 
Request) { }; return new Response(JSON.stringify(response)); } catch (error) { - const message = ParseFlowServiceErrorMessage(error); + const message = await ParseFlowServiceErrorMessage(error); console.error('Error validating peer:', message, error); } } else if (mode === 'create') { const req: CreatePeerRequest = { peer }; try { - const createStatus = await flowServiceClient - .post(`/v1/peers/create`, req) - .then((res) => res.data); + const createStatus = await flowServiceClient.post( + `/v1/peers/create`, + req + ); let response: UCreatePeerResponse = { created: createPeerStatusFromJSON(createStatus.status) === @@ -139,7 +140,7 @@ export async function POST(request: Request) { }; return new Response(JSON.stringify(response)); } catch (error) { - const message = ParseFlowServiceErrorMessage(error); + const message = await ParseFlowServiceErrorMessage(error); console.error('Error creating peer:', message, error); } } diff --git a/ui/app/api/peers/schemas/route.ts b/ui/app/api/peers/schemas/route.ts index ebc15ad3cc..a355187670 100644 --- a/ui/app/api/peers/schemas/route.ts +++ b/ui/app/api/peers/schemas/route.ts @@ -10,15 +10,15 @@ export async function POST(request: Request) { const { peerName } = body; const flowServiceClient = GetFlowServiceHttpClient(); try { - const schemaList = await flowServiceClient - .get(`/v1/peers/schemas?peer_name=${peerName}`) - .then((res) => res.data); + const schemaList = await flowServiceClient.get( + `/v1/peers/schemas?peer_name=${peerName}` + ); let response: USchemasResponse = { schemas: schemaList.schemas, }; return new Response(JSON.stringify(response)); } catch (e) { - const message = ParseFlowServiceErrorMessage(e); + const message = await ParseFlowServiceErrorMessage(e); console.log(message, e); } } diff --git a/ui/app/api/peers/tables/all/route.ts b/ui/app/api/peers/tables/all/route.ts index 09ae4b8d53..79c2c48363 100644 --- a/ui/app/api/peers/tables/all/route.ts +++ b/ui/app/api/peers/tables/all/route.ts @@ -10,15 +10,16 @@ export async function POST(request: Request) { const { peerName } = body; const flowServiceClient = GetFlowServiceHttpClient(); try { - const tableList: AllTablesResponse = await flowServiceClient - .get(`/v1/peers/tables/all?peer_name=${peerName}`) - .then((res) => res.data); + const tableList: AllTablesResponse = + await flowServiceClient.get( + `/v1/peers/tables/all?peer_name=${peerName}` + ); let response: UTablesAllResponse = { tables: tableList.tables, }; return new Response(JSON.stringify(response)); } catch (e) { - const message = ParseFlowServiceErrorMessage(e); + const message = await ParseFlowServiceErrorMessage(e); console.error(message, e); } } diff --git a/ui/app/api/peers/tables/route.ts b/ui/app/api/peers/tables/route.ts index 9b5b4c7a20..f6eda400c1 100644 --- a/ui/app/api/peers/tables/route.ts +++ b/ui/app/api/peers/tables/route.ts @@ -10,17 +10,16 @@ export async function POST(request: Request) { const { peerName, schemaName } = body; const flowServiceClient = GetFlowServiceHttpClient(); try { - const tableList: SchemaTablesResponse = await flowServiceClient - .get( + const tableList: SchemaTablesResponse = + await flowServiceClient.get( `/v1/peers/tables?peer_name=${peerName}&schema_name=${schemaName}` - ) - .then((res) => res.data); + ); let response: UTablesResponse = { tables: tableList.tables, }; return new Response(JSON.stringify(response)); } catch (e) { - const message = ParseFlowServiceErrorMessage(e); + const message = await ParseFlowServiceErrorMessage(e); console.log(message, e); } } diff --git 
a/ui/app/api/version/route.ts b/ui/app/api/version/route.ts index c62c1741c9..1c3a43c38e 100644 --- a/ui/app/api/version/route.ts +++ b/ui/app/api/version/route.ts @@ -9,15 +9,14 @@ export const dynamic = 'force-dynamic'; export async function GET() { const flowServiceClient = GetFlowServiceHttpClient(); try { - const versionResponse = await flowServiceClient - .get(`/v1/version`) - .then((res) => res.data); + const versionResponse = + await flowServiceClient.get(`/v1/version`); let response = { version: versionResponse.version, }; return new Response(JSON.stringify(response)); } catch (error) { - const message = ParseFlowServiceErrorMessage(error); + const message = await ParseFlowServiceErrorMessage(error); console.error('Error getting version:', message); return new Response(JSON.stringify({ error: message })); } diff --git a/ui/app/mirrors/[mirrorId]/page.tsx b/ui/app/mirrors/[mirrorId]/page.tsx index ea02e0b5b4..7d82abdb38 100644 --- a/ui/app/mirrors/[mirrorId]/page.tsx +++ b/ui/app/mirrors/[mirrorId]/page.tsx @@ -27,13 +27,11 @@ async function getMirrorStatus(mirrorId: string) { const url = getMirrorStatusUrl(mirrorId); const flowServiceClient = GetFlowServiceHttpClient(); try { - return await flowServiceClient - .get(url, { - headers: { cache: 'no-store' }, - }) - .then((res) => res.data); + return await flowServiceClient.get(url, { + headers: { cache: 'no-store' }, + }); } catch (e) { - const message = ParseFlowServiceErrorMessage(e); + const message = await ParseFlowServiceErrorMessage(e); console.error(message, e); } } diff --git a/ui/app/peers/[peerName]/page.tsx b/ui/app/peers/[peerName]/page.tsx index 1dfecb1673..af6fc71cc9 100644 --- a/ui/app/peers/[peerName]/page.tsx +++ b/ui/app/peers/[peerName]/page.tsx @@ -18,14 +18,11 @@ const PeerData = async ({ params: { peerName } }: DataConfigProps) => { const getSlotData = async () => { const flowServiceClient = GetFlowServiceHttpClient(); try { - const peerSlots: PeerSlotResponse = await flowServiceClient - .get(`/v1/peers/slots/${peerName}`, { - headers: { - cache: 'no-store', - }, - }) - .then((res) => res.data); - + const peerSlots: PeerSlotResponse = + await flowServiceClient.get( + `/v1/peers/slots/${peerName}`, + { cache: 'no-store' } + ); const slotArray = peerSlots.slotData; // slots with 'peerflow_slot' should come first slotArray?.sort((slotA, slotB) => { @@ -45,7 +42,7 @@ const PeerData = async ({ params: { peerName } }: DataConfigProps) => { }); return slotArray; } catch (e) { - const message = ParseFlowServiceErrorMessage(e); + const message = await ParseFlowServiceErrorMessage(e); console.error(message, e); return []; } @@ -54,17 +51,14 @@ const PeerData = async ({ params: { peerName } }: DataConfigProps) => { const getStatData = async () => { const flowServiceClient = GetFlowServiceHttpClient(); try { - const peerStats: PeerStatResponse = await flowServiceClient - .get(`/v1/peers/stats/${peerName}`, { - headers: { - cache: 'no-store', - }, - }) - .then((res) => res.data); - + const peerStats: PeerStatResponse = + await flowServiceClient.get( + `/v1/peers/stats/${peerName}`, + { cache: 'no-store' } + ); return peerStats.statData; } catch (e) { - const message = ParseFlowServiceErrorMessage(e); + const message = await ParseFlowServiceErrorMessage(e); console.error(message, e); return []; } diff --git a/ui/package-lock.json b/ui/package-lock.json index 96e30300be..db25c42667 100644 --- a/ui/package-lock.json +++ b/ui/package-lock.json @@ -30,7 +30,6 @@ "@types/node": "^20.14.0", "@types/react": "^18.3.3", 
"@types/react-dom": "^18.3.0", - "axios": "^1.7.2", "bcrypt": "^5.1.1", "classnames": "^2.5.1", "long": "^5.2.3", @@ -3170,11 +3169,6 @@ "dev": true, "license": "MIT" }, - "node_modules/asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" - }, "node_modules/autoprefixer": { "version": "10.4.19", "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.19.tgz", @@ -3239,16 +3233,6 @@ "node": ">=4" } }, - "node_modules/axios": { - "version": "1.7.2", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.2.tgz", - "integrity": "sha512-2A8QhOMrbomlDuiLeK9XibIBzuHeRcqqNOHp0Cyp5EoJ1IFDh+XZH3A6BkXtv0K4gFGCI0Y4BM7B1wOEi0Rmgw==", - "dependencies": { - "follow-redirects": "^1.15.6", - "form-data": "^4.0.0", - "proxy-from-env": "^1.1.0" - } - }, "node_modules/axobject-query": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-3.2.1.tgz", @@ -3614,17 +3598,6 @@ "color-support": "bin.js" } }, - "node_modules/combined-stream": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", - "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "dependencies": { - "delayed-stream": "~1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, "node_modules/commander": { "version": "11.1.0", "resolved": "https://registry.npmjs.org/commander/-/commander-11.1.0.tgz", @@ -4041,14 +4014,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", - "engines": { - "node": ">=0.4.0" - } - }, "node_modules/delegates": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", @@ -5041,25 +5006,6 @@ "dev": true, "license": "ISC" }, - "node_modules/follow-redirects": { - "version": "1.15.6", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", - "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==", - "funding": [ - { - "type": "individual", - "url": "https://github.com/sponsors/RubenVerborgh" - } - ], - "engines": { - "node": ">=4.0" - }, - "peerDependenciesMeta": { - "debug": { - "optional": true - } - } - }, "node_modules/for-each": { "version": "0.3.3", "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", @@ -5086,19 +5032,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/form-data": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", - "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", - "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.8", - "mime-types": "^2.1.12" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/fraction.js": { "version": "4.3.7", "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.7.tgz", @@ -6660,6 +6593,7 @@ "version": "1.52.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", "integrity": 
"sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "dev": true, "license": "MIT", "engines": { "node": ">= 0.6" @@ -6669,6 +6603,7 @@ "version": "2.1.35", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dev": true, "license": "MIT", "dependencies": { "mime-db": "1.52.0" @@ -7798,11 +7733,6 @@ "node": ">=12.0.0" } }, - "node_modules/proxy-from-env": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", - "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==" - }, "node_modules/prr": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz", diff --git a/ui/package.json b/ui/package.json index c253325e09..b71d6dca77 100644 --- a/ui/package.json +++ b/ui/package.json @@ -32,7 +32,6 @@ "@types/node": "^20.14.0", "@types/react": "^18.3.3", "@types/react-dom": "^18.3.0", - "axios": "^1.7.2", "bcrypt": "^5.1.1", "classnames": "^2.5.1", "long": "^5.2.3", diff --git a/ui/rpc/http.ts b/ui/rpc/http.ts index 265b6c05a9..457f8bba73 100644 --- a/ui/rpc/http.ts +++ b/ui/rpc/http.ts @@ -1,29 +1,57 @@ import { GetAPIToken } from '@/rpc/token'; -import { ServiceError } from '@grpc/grpc-js'; -import axios from 'axios'; export function GetFlowHttpAddressFromEnv() { return process.env.PEERDB_FLOW_SERVER_HTTP!; } -const flowServiceHttpClient = axios.create({ - baseURL: GetFlowHttpAddressFromEnv(), - headers: { - 'Content-Type': 'application/json', - Authorization: `Bearer ${GetAPIToken()}`, - }, +function handleResponse(res: Response) { + if (!res.ok) { + throw res; + } + return res.json(); +} + +class Client { + baseUrl: string; + headers: { [key: string]: any }; + + constructor(baseUrl: string, headers: { [key: string]: any }) { + this.baseUrl = baseUrl; + this.headers = headers; + } + + raw(path: string, headers?: { [key: string]: any }) { + return fetch(this.baseUrl + path, { ...this.headers, ...headers }); + } + + get(path: string, headers?: { [key: string]: any }) { + return this.raw(path, headers).then(handleResponse); + } + + post(path: string, headers?: { [key: string]: any }) { + return this.raw(path, { + method: 'POST', + ...headers, + }).then(handleResponse); + } +} + +const flowServiceHttpClient = new Client(GetFlowHttpAddressFromEnv(), { + 'Content-Type': 'application/json', + Authorization: `Bearer ${GetAPIToken()}`, }); export function GetFlowServiceHttpClient() { return flowServiceHttpClient; } -export function ParseFlowServiceErrorMessage(error: any) { - if (axios.isAxiosError(error)) { - if (error.response && (error.response.data as ServiceError).code) { - return (error.response.data as ServiceError).message; - } - return error.response?.data || error.message; +export async function ParseFlowServiceErrorMessage(error: any) { + if (error instanceof Response) { + const text = await error.text(); + return `${error.status} ${error.statusText} ${text}`; + } else if (error instanceof Error) { + return error.message; + } else { + return error; } - return error; } diff --git a/ui/rpc/token.ts b/ui/rpc/token.ts index a5bb88acc3..04408cd183 100644 --- a/ui/rpc/token.ts +++ b/ui/rpc/token.ts @@ -6,7 +6,7 @@ function hashPassword(password: string, rounds: number) { } export function GetAPIToken() { - const password = process.env.PEERDB_PASSWORD!; + const password = 
process.env.PEERDB_PASSWORD ?? ''; const hashedPassword = hashPassword(password, 10); return Buffer.from(hashedPassword).toString('base64'); } From f05234e2bd1154f97fb9f0a9ffd28a1df091bbf3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Philip=20Dub=C3=A9?= Date: Thu, 6 Jun 2024 20:12:38 +0000 Subject: [PATCH 27/31] npm run format --- ui/app/api/mirrors/cdc/validate/route.ts | 5 +---- ui/app/api/mirrors/drop/route.ts | 6 ++++-- ui/app/api/mirrors/qrep/route.ts | 9 ++++----- ui/app/api/mirrors/state_change/route.ts | 9 ++++----- ui/app/api/peers/columns/route.ts | 7 +++---- ui/app/api/peers/drop/route.ts | 7 ++----- ui/app/api/peers/route.ts | 1 - ui/app/api/peers/schemas/route.ts | 1 - ui/app/api/peers/tables/all/route.ts | 7 +++---- ui/app/api/peers/tables/route.ts | 7 +++---- ui/app/api/version/route.ts | 4 +--- ui/app/mirrors/[mirrorId]/page.tsx | 1 - ui/app/peers/[peerName]/page.tsx | 18 ++++++++---------- 13 files changed, 33 insertions(+), 49 deletions(-) diff --git a/ui/app/api/mirrors/cdc/validate/route.ts b/ui/app/api/mirrors/cdc/validate/route.ts index 7e57f949f8..1d9ed31a0a 100644 --- a/ui/app/api/mirrors/cdc/validate/route.ts +++ b/ui/app/api/mirrors/cdc/validate/route.ts @@ -17,10 +17,7 @@ export async function POST(request: NextRequest) { }; try { const validateResponse: ValidateCDCMirrorResponse = - await flowServiceClient.post( - `/v1/mirrors/cdc/validate`, - req - ); + await flowServiceClient.post(`/v1/mirrors/cdc/validate`, req); return new Response(JSON.stringify(validateResponse)); } catch (e) { const message = await ParseFlowServiceErrorMessage(e); diff --git a/ui/app/api/mirrors/drop/route.ts b/ui/app/api/mirrors/drop/route.ts index a16fc52d4e..948b042eb0 100644 --- a/ui/app/api/mirrors/drop/route.ts +++ b/ui/app/api/mirrors/drop/route.ts @@ -18,8 +18,10 @@ export async function POST(request: Request) { }; try { - const dropStatus: ShutdownResponse = - await flowServiceClient.post(`/v1/mirrors/drop`, req); + const dropStatus: ShutdownResponse = await flowServiceClient.post( + `/v1/mirrors/drop`, + req + ); let response: UDropMirrorResponse = { dropped: dropStatus.ok, errorMessage: dropStatus.errorMessage, diff --git a/ui/app/api/mirrors/qrep/route.ts b/ui/app/api/mirrors/qrep/route.ts index ac605ebed8..6ded5afe38 100644 --- a/ui/app/api/mirrors/qrep/route.ts +++ b/ui/app/api/mirrors/qrep/route.ts @@ -18,11 +18,10 @@ export async function POST(request: Request) { createCatalogEntry: true, }; try { - const createStatus: CreateQRepFlowResponse = - await flowServiceClient.post( - `/v1/flows/qrep/create`, - req - ); + const createStatus: CreateQRepFlowResponse = await flowServiceClient.post( + `/v1/flows/qrep/create`, + req + ); let response: UCreateMirrorResponse = { created: !!createStatus.workflowId, }; diff --git a/ui/app/api/mirrors/state_change/route.ts b/ui/app/api/mirrors/state_change/route.ts index 21e18a512f..c3c3ff2001 100644 --- a/ui/app/api/mirrors/state_change/route.ts +++ b/ui/app/api/mirrors/state_change/route.ts @@ -8,11 +8,10 @@ export async function POST(request: Request) { const body = await request.json(); const flowServiceClient = GetFlowServiceHttpClient(); try { - const res: FlowStateChangeResponse = - await flowServiceClient.post( - `/v1/mirrors/state_change`, - body - ); + const res: FlowStateChangeResponse = await flowServiceClient.post( + `/v1/mirrors/state_change`, + body + ); return new Response(JSON.stringify(res)); } catch (e) { const message = await ParseFlowServiceErrorMessage(e); diff --git a/ui/app/api/peers/columns/route.ts 
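
The rewritten client in ui/rpc/http.ts above throws the Response object itself when res.ok is false, rather than converting it to an Error on the spot; that keeps the status code and body available to whoever catches it, and it is why ParseFlowServiceErrorMessage became async. A small illustrative helper showing how a failed call surfaces, again assuming the '@/rpc/http' import path:

    // Illustrative only: exercises the error path of the fetch-based client.
    import {
      GetFlowServiceHttpClient,
      ParseFlowServiceErrorMessage,
    } from '@/rpc/http';

    async function describeFailure(path: string): Promise<string> {
      try {
        // handleResponse throws the raw Response whenever res.ok is false
        await GetFlowServiceHttpClient().get(path);
        return 'ok';
      } catch (err) {
        // Yields "<status> <statusText> <body text>"; the exact body depends on the server.
        return await ParseFlowServiceErrorMessage(err);
      }
    }
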
b/ui/app/api/peers/columns/route.ts index c32e8611b9..4417f9f2a2 100644 --- a/ui/app/api/peers/columns/route.ts +++ b/ui/app/api/peers/columns/route.ts @@ -10,10 +10,9 @@ export async function POST(request: Request) { const { peerName, schemaName, tableName } = body; const flowServiceClient = GetFlowServiceHttpClient(); try { - const columnsList: TableColumnsResponse = - await flowServiceClient.get( - `/v1/peers/columns?peer_name=${peerName}&schema_name=${schemaName}&table_name=${tableName}` - ); + const columnsList: TableColumnsResponse = await flowServiceClient.get( + `/v1/peers/columns?peer_name=${peerName}&schema_name=${schemaName}&table_name=${tableName}` + ); let response: UColumnsResponse = { columns: columnsList.columns, }; diff --git a/ui/app/api/peers/drop/route.ts b/ui/app/api/peers/drop/route.ts index a7b4fe234f..2bc462f16d 100644 --- a/ui/app/api/peers/drop/route.ts +++ b/ui/app/api/peers/drop/route.ts @@ -1,5 +1,5 @@ import { UDropPeerResponse } from '@/app/dto/PeersDTO'; -import { DropPeerRequest, DropPeerResponse } from '@/grpc_generated/route'; +import { DropPeerRequest } from '@/grpc_generated/route'; import { GetFlowServiceHttpClient, ParseFlowServiceErrorMessage, @@ -14,10 +14,7 @@ export async function POST(request: Request) { }; console.log('/drop/peer: req:', req); try { - const dropStatus = await flowServiceClient.post( - `/v1/peers/drop`, - req - ); + const dropStatus = await flowServiceClient.post(`/v1/peers/drop`, req); let response: UDropPeerResponse = { dropped: dropStatus.ok, errorMessage: dropStatus.errorMessage, diff --git a/ui/app/api/peers/route.ts b/ui/app/api/peers/route.ts index 48cadb0943..a503882ba0 100644 --- a/ui/app/api/peers/route.ts +++ b/ui/app/api/peers/route.ts @@ -21,7 +21,6 @@ import { } from '@/grpc_generated/peers'; import { CreatePeerRequest, - CreatePeerResponse, CreatePeerStatus, ValidatePeerRequest, ValidatePeerResponse, diff --git a/ui/app/api/peers/schemas/route.ts b/ui/app/api/peers/schemas/route.ts index a355187670..4d2b6aa8a8 100644 --- a/ui/app/api/peers/schemas/route.ts +++ b/ui/app/api/peers/schemas/route.ts @@ -1,5 +1,4 @@ import { USchemasResponse } from '@/app/dto/PeersDTO'; -import { PeerSchemasResponse } from '@/grpc_generated/route'; import { GetFlowServiceHttpClient, ParseFlowServiceErrorMessage, diff --git a/ui/app/api/peers/tables/all/route.ts b/ui/app/api/peers/tables/all/route.ts index 79c2c48363..021b5f4799 100644 --- a/ui/app/api/peers/tables/all/route.ts +++ b/ui/app/api/peers/tables/all/route.ts @@ -10,10 +10,9 @@ export async function POST(request: Request) { const { peerName } = body; const flowServiceClient = GetFlowServiceHttpClient(); try { - const tableList: AllTablesResponse = - await flowServiceClient.get( - `/v1/peers/tables/all?peer_name=${peerName}` - ); + const tableList: AllTablesResponse = await flowServiceClient.get( + `/v1/peers/tables/all?peer_name=${peerName}` + ); let response: UTablesAllResponse = { tables: tableList.tables, }; diff --git a/ui/app/api/peers/tables/route.ts b/ui/app/api/peers/tables/route.ts index f6eda400c1..b8a3c3f842 100644 --- a/ui/app/api/peers/tables/route.ts +++ b/ui/app/api/peers/tables/route.ts @@ -10,10 +10,9 @@ export async function POST(request: Request) { const { peerName, schemaName } = body; const flowServiceClient = GetFlowServiceHttpClient(); try { - const tableList: SchemaTablesResponse = - await flowServiceClient.get( - `/v1/peers/tables?peer_name=${peerName}&schema_name=${schemaName}` - ); + const tableList: SchemaTablesResponse = await 
flowServiceClient.get( + `/v1/peers/tables?peer_name=${peerName}&schema_name=${schemaName}` + ); let response: UTablesResponse = { tables: tableList.tables, }; diff --git a/ui/app/api/version/route.ts b/ui/app/api/version/route.ts index 1c3a43c38e..dd22a41eb4 100644 --- a/ui/app/api/version/route.ts +++ b/ui/app/api/version/route.ts @@ -1,4 +1,3 @@ -import { PeerDBVersionResponse } from '@/grpc_generated/route'; import { GetFlowServiceHttpClient, ParseFlowServiceErrorMessage, @@ -9,8 +8,7 @@ export const dynamic = 'force-dynamic'; export async function GET() { const flowServiceClient = GetFlowServiceHttpClient(); try { - const versionResponse = - await flowServiceClient.get(`/v1/version`); + const versionResponse = await flowServiceClient.get(`/v1/version`); let response = { version: versionResponse.version, }; diff --git a/ui/app/mirrors/[mirrorId]/page.tsx b/ui/app/mirrors/[mirrorId]/page.tsx index 7d82abdb38..8dc6a3f703 100644 --- a/ui/app/mirrors/[mirrorId]/page.tsx +++ b/ui/app/mirrors/[mirrorId]/page.tsx @@ -3,7 +3,6 @@ import prisma from '@/app/utils/prisma'; import MirrorActions from '@/components/MirrorActionsDropdown'; import { FlowConnectionConfigs, FlowStatus } from '@/grpc_generated/flow'; import { DBType } from '@/grpc_generated/peers'; -import { MirrorStatusResponse } from '@/grpc_generated/route'; import { Header } from '@/lib/Header'; import { LayoutMain } from '@/lib/Layout'; import { diff --git a/ui/app/peers/[peerName]/page.tsx b/ui/app/peers/[peerName]/page.tsx index af6fc71cc9..d267c75631 100644 --- a/ui/app/peers/[peerName]/page.tsx +++ b/ui/app/peers/[peerName]/page.tsx @@ -18,11 +18,10 @@ const PeerData = async ({ params: { peerName } }: DataConfigProps) => { const getSlotData = async () => { const flowServiceClient = GetFlowServiceHttpClient(); try { - const peerSlots: PeerSlotResponse = - await flowServiceClient.get( - `/v1/peers/slots/${peerName}`, - { cache: 'no-store' } - ); + const peerSlots: PeerSlotResponse = await flowServiceClient.get( + `/v1/peers/slots/${peerName}`, + { cache: 'no-store' } + ); const slotArray = peerSlots.slotData; // slots with 'peerflow_slot' should come first slotArray?.sort((slotA, slotB) => { @@ -51,11 +50,10 @@ const PeerData = async ({ params: { peerName } }: DataConfigProps) => { const getStatData = async () => { const flowServiceClient = GetFlowServiceHttpClient(); try { - const peerStats: PeerStatResponse = - await flowServiceClient.get( - `/v1/peers/stats/${peerName}`, - { cache: 'no-store' } - ); + const peerStats: PeerStatResponse = await flowServiceClient.get( + `/v1/peers/stats/${peerName}`, + { cache: 'no-store' } + ); return peerStats.statData; } catch (e) { const message = await ParseFlowServiceErrorMessage(e); From 5666d5072e63269c2e83fef153f4ae030ec49357 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Philip=20Dub=C3=A9?= Date: Thu, 6 Jun 2024 20:29:35 +0000 Subject: [PATCH 28/31] headers go in headers --- ui/app/mirrors/[mirrorId]/page.tsx | 4 +--- ui/rpc/http.ts | 12 ++++++------ 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/ui/app/mirrors/[mirrorId]/page.tsx b/ui/app/mirrors/[mirrorId]/page.tsx index 8dc6a3f703..14f0dc1980 100644 --- a/ui/app/mirrors/[mirrorId]/page.tsx +++ b/ui/app/mirrors/[mirrorId]/page.tsx @@ -26,9 +26,7 @@ async function getMirrorStatus(mirrorId: string) { const url = getMirrorStatusUrl(mirrorId); const flowServiceClient = GetFlowServiceHttpClient(); try { - return await flowServiceClient.get(url, { - headers: { cache: 'no-store' }, - }); + return await flowServiceClient.get(url, { 
cache: 'no-store' }); } catch (e) { const message = await ParseFlowServiceErrorMessage(e); console.error(message, e); diff --git a/ui/rpc/http.ts b/ui/rpc/http.ts index 457f8bba73..812521b9bb 100644 --- a/ui/rpc/http.ts +++ b/ui/rpc/http.ts @@ -20,18 +20,18 @@ class Client { this.headers = headers; } - raw(path: string, headers?: { [key: string]: any }) { - return fetch(this.baseUrl + path, { ...this.headers, ...headers }); + raw(path: string, options?: { [key: string]: any }) { + return fetch(this.baseUrl + path, { headers: this.headers, ...options }); } - get(path: string, headers?: { [key: string]: any }) { - return this.raw(path, headers).then(handleResponse); + get(path: string, options?: { [key: string]: any }) { + return this.raw(path, options).then(handleResponse); } - post(path: string, headers?: { [key: string]: any }) { + post(path: string, options?: { [key: string]: any }) { return this.raw(path, { method: 'POST', - ...headers, + ...options, }).then(handleResponse); } } From 1798a85c20b63389abafa21a44a6c1a1c0b02d4e Mon Sep 17 00:00:00 2001 From: Kevin Biju Date: Tue, 11 Jun 2024 03:57:23 +0530 Subject: [PATCH 29/31] back to basics (auth) and route fixes --- docker-compose-dev.yml | 2 - flow/cmd/api.go | 6 +- flow/go.mod | 1 + flow/go.sum | 2 + flow/middleware/auth.go | 32 ++- flow/middleware/interceptors.go | 22 +- flow/peerdbenv/config.go | 2 +- nexus/Cargo.lock | 24 -- nexus/flow-rs/Cargo.toml | 1 - nexus/flow-rs/src/grpc.rs | 5 +- ui/app/api/version/route.ts | 6 +- ui/components/SidebarComponent.tsx | 16 +- ui/package-lock.json | 389 ++--------------------------- ui/package.json | 2 - ui/rpc/http.ts | 28 ++- ui/rpc/token.ts | 15 +- 16 files changed, 117 insertions(+), 436 deletions(-) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 1868c755bf..2f1820d28f 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -168,7 +168,6 @@ services: environment: <<: *catalog-config PEERDB_LOG_DIR: /var/log/peerdb - PEERDB_PASSWORD: peerdb PEERDB_FLOW_SERVER_ADDRESS: grpc://flow_api:8112 RUST_LOG: info RUST_BACKTRACE: 1 @@ -189,7 +188,6 @@ services: <<: *catalog-config DATABASE_URL: postgres://postgres:postgres@catalog:5432/postgres PEERDB_FLOW_SERVER_HTTP: http://flow_api:8113 - PEERDB_PASSWORD: NEXTAUTH_SECRET: __changeme__ NEXTAUTH_URL: http://localhost:3000 depends_on: diff --git a/flow/cmd/api.go b/flow/cmd/api.go index 1b68d60fe7..50910bef4f 100644 --- a/flow/cmd/api.go +++ b/flow/cmd/api.go @@ -120,10 +120,14 @@ func APIMain(ctx context.Context, args *APIServerParams) error { grpc_health_v1.Health_Check_FullMethodName, grpc_health_v1.Health_Watch_FullMethodName, } + authInterceptor, err := middleware.CreateAuthServerInterceptor(ctx, healthMethods) + if err != nil { + return fmt.Errorf("unable to create auth middleware: %w", err) + } grpcServer := grpc.NewServer( grpc.ChainUnaryInterceptor( middleware.CreateRequestLoggingInterceptor(healthMethods), - middleware.CreateAuthServerInterceptor(ctx, peerdbenv.PeerDBPassword(), healthMethods), + authInterceptor, ), ) diff --git a/flow/go.mod b/flow/go.mod index f16ccf1f1c..404a6924f9 100644 --- a/flow/go.mod +++ b/flow/go.mod @@ -103,6 +103,7 @@ require ( github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 // indirect github.com/gorilla/websocket v1.5.1 // indirect github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect + github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect github.com/kr/pretty v0.3.1 // indirect github.com/kr/text v0.2.0 // indirect github.com/mtibben/percent 
v0.2.1 // indirect diff --git a/flow/go.sum b/flow/go.sum index 5b8b6c3a07..102f58caa8 100644 --- a/flow/go.sum +++ b/flow/go.sum @@ -268,6 +268,8 @@ github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0 h1:bkypFPDjIYGfCYD5mRBvpqxfYX1 github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0/go.mod h1:P+Lt/0by1T8bfcF3z737NnSbmxQAppXMRziHUxPOC8k= github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c h1:6rhixN/i8ZofjG1Y75iExal34USq5p+wiN1tpie8IrU= github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c/go.mod h1:NMPJylDgVpX0MLRlPy15sqSwOFv/U1GZ2m21JhFfek0= +github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= +github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= github.com/jackc/pgerrcode v0.0.0-20240316143900-6e2875d9b438 h1:Dj0L5fhJ9F82ZJyVOmBx6msDp/kfd1t9GRfny/mfJA0= diff --git a/flow/middleware/auth.go b/flow/middleware/auth.go index 2f513d083e..499c9f884a 100644 --- a/flow/middleware/auth.go +++ b/flow/middleware/auth.go @@ -6,16 +6,20 @@ import ( "log/slog" "strings" + "github.com/hashicorp/golang-lru/v2/expirable" "golang.org/x/crypto/bcrypt" "google.golang.org/grpc/codes" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" "github.com/PeerDB-io/peer-flow/logger" + "github.com/PeerDB-io/peer-flow/peerdbenv" ) -// Authorize checks the authorization metadata and compares the incoming bearer token with the plaintext -func Authorize(ctx context.Context, plaintext string) (context.Context, error) { +const hashedKey = "sognodivolare" + +// authorize checks the authorization metadata and compares the incoming bearer token with the plaintext +func authorize(ctx context.Context, hashCache *expirable.LRU[string, []byte]) (context.Context, error) { md, _ := metadata.FromIncomingContext(ctx) if len(md["authorization"]) == 0 { return nil, status.Errorf(codes.Unauthenticated, "Authorization token is required") @@ -33,9 +37,31 @@ func Authorize(ctx context.Context, plaintext string) (context.Context, error) { logger.LoggerFromCtx(ctx).Warn("Error decoding token", slog.String("token", base64Token), slog.Any("error", err)) return nil, status.Errorf(codes.Unauthenticated, "Authentication failed") } - if err := bcrypt.CompareHashAndPassword(tokenBytes, []byte(plaintext)); err != nil { + + hash, err := getCachedHash(hashCache) + if err != nil || hash == nil { + logger.LoggerFromCtx(ctx).Warn("Error getting hash", slog.Any("error", err)) + return nil, status.Errorf(codes.Unauthenticated, "Authentication failed") + } + if err := bcrypt.CompareHashAndPassword(hash, tokenBytes); err != nil { logger.LoggerFromCtx(ctx).Warn("Error validating token", slog.String("token", string(tokenBytes)), slog.Any("error", err)) return nil, status.Errorf(codes.Unauthenticated, "Authentication failed") } return ctx, nil } + +func getCachedHash(hashCache *expirable.LRU[string, []byte]) ([]byte, error) { + if value, ok := hashCache.Get(hashedKey); ok { + return value, nil + } + plaintext := peerdbenv.PeerDBPassword() + if plaintext == "" { + return nil, nil + } + hashedPassword, err := bcrypt.GenerateFromPassword([]byte(plaintext), bcrypt.DefaultCost) + if err != nil { + return nil, err + } + hashCache.Add(hashedKey, hashedPassword) + return hashedPassword, nil +} diff --git a/flow/middleware/interceptors.go b/flow/middleware/interceptors.go index 
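
The auth.go change above inverts where the bcrypt work happens: the server now derives a bcrypt hash of its own PEERDB_PASSWORD (kept in the size-1 expirable LRU wired up in the interceptors.go change that follows, expiring after ten minutes so live password changes are picked up), base64-decodes the incoming bearer token, and compares the decoded plaintext against that hash. Correspondingly, the nexus and UI clients further down in this commit stop hashing and simply base64-encode the password. A TypeScript sketch of the client-side half; the values and variable names are illustrative, the real code lives in ui/rpc/token.ts later in this commit:

    // Client-side half of the new scheme: send base64(PEERDB_PASSWORD), not a hash.
    const password = process.env.PEERDB_PASSWORD ?? '';
    const authorization = password
      ? `Bearer ${Buffer.from(password).toString('base64')}`
      : ''; // no header at all when auth is disabled

    // The Go middleware strips "Bearer ", base64-decodes the rest, and runs
    // bcrypt.CompareHashAndPassword(cachedHash, decodedBytes).
    const decodedByServer = Buffer.from(
      authorization.replace(/^Bearer /, ''),
      'base64'
    ).toString(); // recovers `password` exactly
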
ebaf15ec35..8b06f85ca7 100644 --- a/flow/middleware/interceptors.go +++ b/flow/middleware/interceptors.go @@ -2,37 +2,51 @@ package middleware import ( "context" + "fmt" "log/slog" "time" + "github.com/hashicorp/golang-lru/v2/expirable" "google.golang.org/grpc" "google.golang.org/grpc/peer" "google.golang.org/grpc/status" "github.com/PeerDB-io/peer-flow/logger" + "github.com/PeerDB-io/peer-flow/peerdbenv" ) -func CreateAuthServerInterceptor(ctx context.Context, plaintext string, unauthenticatedMethods []string) grpc.UnaryServerInterceptor { +func CreateAuthServerInterceptor(ctx context.Context, unauthenticatedMethods []string) (grpc.UnaryServerInterceptor, error) { + plaintext := peerdbenv.PeerDBPassword() + if plaintext == "" { logger.LoggerFromCtx(ctx).Warn("Authentication is disabled") return func(ctx context.Context, req any, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (any, error) { return handler(ctx, req) - } + }, nil } + unauthenticatedMethodsSet := make(map[string]struct{}) for _, method := range unauthenticatedMethods { unauthenticatedMethodsSet[method] = struct{}{} } + + // accommodate live password changes and reduce time hash is in memory + hashCache := expirable.NewLRU[string, []byte](1, nil, 10*time.Minute) + _, err := getCachedHash(hashCache) + if err != nil { + return nil, fmt.Errorf("error generating hash: %w", err) + } + return func(ctx context.Context, req any, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (any, error) { if _, ok := unauthenticatedMethodsSet[info.FullMethod]; ok { return handler(ctx, req) } - ctx, err := Authorize(ctx, plaintext) + ctx, err := authorize(ctx, hashCache) if err != nil { return nil, err } return handler(ctx, req) - } + }, nil } // CreateRequestLoggingInterceptor logs all requests diff --git a/flow/peerdbenv/config.go b/flow/peerdbenv/config.go index 6104e44b2a..7e844b33d8 100644 --- a/flow/peerdbenv/config.go +++ b/flow/peerdbenv/config.go @@ -21,7 +21,7 @@ func PeerDBDeploymentUID() string { return GetEnvString("PEERDB_DEPLOYMENT_UID", "") } -// PEERDB_PASSWORD +// PEERDB_PASSWORD: use carefully, this is a secret func PeerDBPassword() string { return GetEnvString("PEERDB_PASSWORD", "") } diff --git a/nexus/Cargo.lock b/nexus/Cargo.lock index 6413503b80..12c5dc5f89 100644 --- a/nexus/Cargo.lock +++ b/nexus/Cargo.lock @@ -319,19 +319,6 @@ dependencies = [ "smallvec", ] -[[package]] -name = "bcrypt" -version = "0.15.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e65938ed058ef47d92cf8b346cc76ef48984572ade631927e9937b5ffc7662c7" -dependencies = [ - "base64 0.22.1", - "blowfish", - "getrandom", - "subtle", - "zeroize", -] - [[package]] name = "bindgen" version = "0.69.4" @@ -394,16 +381,6 @@ dependencies = [ "generic-array", ] -[[package]] -name = "blowfish" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e412e2cd0f2b2d93e02543ceae7917b3c70331573df19ee046bcbc35e45e87d7" -dependencies = [ - "byteorder", - "cipher", -] - [[package]] name = "borsh" version = "1.5.1" @@ -1010,7 +987,6 @@ version = "0.1.0" dependencies = [ "anyhow", "base64 0.22.1", - "bcrypt", "catalog", "pt", "serde_json", diff --git a/nexus/flow-rs/Cargo.toml b/nexus/flow-rs/Cargo.toml index 2f5edf72dd..69596eba19 100644 --- a/nexus/flow-rs/Cargo.toml +++ b/nexus/flow-rs/Cargo.toml @@ -10,5 +10,4 @@ tracing.workspace = true tonic-health = "0.11" pt = { path = "../pt" } catalog = { path = "../catalog" } -bcrypt = "0.15.1" base64 = "0.22.1" diff --git a/nexus/flow-rs/src/grpc.rs 
b/nexus/flow-rs/src/grpc.rs index 171510ba6f..0957d6be5c 100644 --- a/nexus/flow-rs/src/grpc.rs +++ b/nexus/flow-rs/src/grpc.rs @@ -50,9 +50,8 @@ impl FlowGrpcClient { // Create a gRPC channel let channel = tonic::transport::Channel::from_shared(grpc_endpoint.clone())?.connect_lazy(); - // Setup the token by hashing the password and base64 encoding it - let hashed_password = bcrypt::hash(password, bcrypt::DEFAULT_COST).unwrap(); - let token = base64::prelude::BASE64_STANDARD.encode(hashed_password.as_bytes()); + // encode the password to base64, to send in all requests + let token = base64::prelude::BASE64_STANDARD.encode(password.as_bytes()); // use the token in all requests let interceptor = BearerAuthInterceptor { token }; diff --git a/ui/app/api/version/route.ts b/ui/app/api/version/route.ts index dd22a41eb4..7371ba4269 100644 --- a/ui/app/api/version/route.ts +++ b/ui/app/api/version/route.ts @@ -14,8 +14,12 @@ export async function GET() { }; return new Response(JSON.stringify(response)); } catch (error) { + if (error instanceof Response) { + return error; + } + const message = await ParseFlowServiceErrorMessage(error); console.error('Error getting version:', message); - return new Response(JSON.stringify({ error: message })); + return new Response(JSON.stringify({ error: message }), { status: 500 }); } } diff --git a/ui/components/SidebarComponent.tsx b/ui/components/SidebarComponent.tsx index 5be2e128f6..48feba6c79 100644 --- a/ui/components/SidebarComponent.tsx +++ b/ui/components/SidebarComponent.tsx @@ -1,7 +1,6 @@ 'use client'; import { UVersionResponse } from '@/app/dto/VersionDTO'; -import { fetcher } from '@/app/utils/swr'; import Logout from '@/components/Logout'; import { BrandLogo } from '@/lib/BrandLogo'; import { Button } from '@/lib/Button'; @@ -29,9 +28,16 @@ export default function SidebarComponent() { const { data: version, isLoading, + error, }: { data: UVersionResponse; error: any; isLoading: boolean } = useSWR( '/api/version', - fetcher + async (url: string) => { + const res = await fetch(url); + if (!res.ok) { + throw new Error(); + } + return res.json(); + } ); const [sidebarState, setSidebarState] = useLocalStorage( @@ -114,7 +120,11 @@ export default function SidebarComponent() { ) : ( diff --git a/ui/package-lock.json b/ui/package-lock.json index db25c42667..dab414350b 100644 --- a/ui/package-lock.json +++ b/ui/package-lock.json @@ -30,7 +30,6 @@ "@types/node": "^20.14.0", "@types/react": "^18.3.3", "@types/react-dom": "^18.3.0", - "bcrypt": "^5.1.1", "classnames": "^2.5.1", "long": "^5.2.3", "lucide-react": "^0.383.0", @@ -52,7 +51,6 @@ "zod": "^3.23.8" }, "devDependencies": { - "@types/bcrypt": "^5.0.2", "autoprefixer": "^10.4.19", "copy-webpack-plugin": "^12.0.2", "eslint": "^8.57.0", @@ -722,25 +720,6 @@ "url": "https://opencollective.com/js-sdsl" } }, - "node_modules/@mapbox/node-pre-gyp": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz", - "integrity": "sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ==", - "dependencies": { - "detect-libc": "^2.0.0", - "https-proxy-agent": "^5.0.0", - "make-dir": "^3.1.0", - "node-fetch": "^2.6.7", - "nopt": "^5.0.0", - "npmlog": "^5.0.1", - "rimraf": "^3.0.2", - "semver": "^7.3.5", - "tar": "^6.1.11" - }, - "bin": { - "node-pre-gyp": "bin/node-pre-gyp" - } - }, "node_modules/@monaco-editor/loader": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/@monaco-editor/loader/-/loader-1.4.0.tgz", @@ 
-2236,15 +2215,6 @@ "react-dom": ">=16.6.0" } }, - "node_modules/@types/bcrypt": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/@types/bcrypt/-/bcrypt-5.0.2.tgz", - "integrity": "sha512-6atioO8Y75fNcbmj0G7UjI9lXN2pQ/IGJ2FWT4a/btd0Lk9lQalHLKhkgKVZ3r+spnmWUKfbMi1GEe9wyHQfNQ==", - "dev": true, - "dependencies": { - "@types/node": "*" - } - }, "node_modules/@types/d3-array": { "version": "3.2.1", "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.1.tgz", @@ -2753,11 +2723,6 @@ "dev": true, "license": "Apache-2.0" }, - "node_modules/abbrev": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", - "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==" - }, "node_modules/acorn": { "version": "8.11.3", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz", @@ -2791,17 +2756,6 @@ "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, - "node_modules/agent-base": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", - "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", - "dependencies": { - "debug": "4" - }, - "engines": { - "node": ">= 6.0.0" - } - }, "node_modules/ajv": { "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", @@ -2914,24 +2868,6 @@ "node": ">= 8" } }, - "node_modules/aproba": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz", - "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==" - }, - "node_modules/are-we-there-yet": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz", - "integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==", - "deprecated": "This package is no longer supported.", - "dependencies": { - "delegates": "^1.0.0", - "readable-stream": "^3.6.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/arg": { "version": "5.0.2", "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", @@ -3264,19 +3200,6 @@ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "license": "MIT" }, - "node_modules/bcrypt": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/bcrypt/-/bcrypt-5.1.1.tgz", - "integrity": "sha512-AGBHOG5hPYZ5Xl9KXzU5iKq9516yEmvCKDg3ecP5kX2aB6UqTeXZxk2ELnDgDm6BQSMlLt9rDB4LoSMx0rYwww==", - "hasInstallScript": true, - "dependencies": { - "@mapbox/node-pre-gyp": "^1.0.11", - "node-addon-api": "^5.0.0" - }, - "engines": { - "node": ">= 10.0.0" - } - }, "node_modules/binary-extensions": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", @@ -3293,6 +3216,7 @@ "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", @@ -3482,14 +3406,6 @@ "node": ">= 6" } }, - "node_modules/chownr": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", - "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", - "engines": { - "node": 
">=10" - } - }, "node_modules/chrome-trace-event": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.4.tgz", @@ -3590,14 +3506,6 @@ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "license": "MIT" }, - "node_modules/color-support": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", - "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", - "bin": { - "color-support": "bin.js" - } - }, "node_modules/commander": { "version": "11.1.0", "resolved": "https://registry.npmjs.org/commander/-/commander-11.1.0.tgz", @@ -3619,13 +3527,9 @@ "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true, "license": "MIT" }, - "node_modules/console-control-strings": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", - "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==" - }, "node_modules/convert-source-map": { "version": "1.9.0", "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz", @@ -3952,6 +3856,7 @@ "version": "4.3.5", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.5.tgz", "integrity": "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==", + "dev": true, "license": "MIT", "dependencies": { "ms": "2.1.2" @@ -4014,11 +3919,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/delegates": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", - "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==" - }, "node_modules/dequal": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", @@ -4029,14 +3929,6 @@ "node": ">=6" } }, - "node_modules/detect-libc": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz", - "integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==", - "engines": { - "node": ">=8" - } - }, "node_modules/detect-node-es": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz", @@ -5061,32 +4953,11 @@ "node": ">=14.14" } }, - "node_modules/fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/fs-minipass/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": 
"sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true, "license": "ISC" }, "node_modules/fsevents": { @@ -5141,49 +5012,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/gauge": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz", - "integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==", - "deprecated": "This package is no longer supported.", - "dependencies": { - "aproba": "^1.0.3 || ^2.0.0", - "color-support": "^1.1.2", - "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.1", - "object-assign": "^4.1.1", - "signal-exit": "^3.0.0", - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1", - "wide-align": "^1.1.2" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/gauge/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, - "node_modules/gauge/node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==" - }, - "node_modules/gauge/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/get-caller-file": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", @@ -5561,11 +5389,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/has-unicode": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", - "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==" - }, "node_modules/hasown": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", @@ -5587,18 +5410,6 @@ "react-is": "^16.7.0" } }, - "node_modules/https-proxy-agent": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", - "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", - "dependencies": { - "agent-base": "6", - "debug": "4" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/iconv-lite": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", @@ -5668,6 +5479,7 @@ "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dev": true, "license": "ISC", "dependencies": { "once": "^1.3.0", @@ -5678,6 +5490,7 @@ "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true, "license": "ISC" }, "node_modules/internal-slot": { @@ -6514,6 +6327,7 @@ "version": "3.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, "license": "MIT", "dependencies": { "semver": "^6.0.0" @@ -6529,6 +6343,7 @@ "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, "license": "ISC", "bin": { "semver": "bin/semver.js" @@ -6616,6 +6431,7 @@ "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, "license": "ISC", "dependencies": { "brace-expansion": "^1.1.7" @@ -6643,40 +6459,6 @@ "node": ">=16 || 14 >=14.17" } }, - "node_modules/minizlib": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", - "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", - "dependencies": { - "minipass": "^3.0.0", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/minizlib/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "bin": { - "mkdirp": "bin/cmd.js" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/moment": { "version": "2.30.1", "resolved": "https://registry.npmjs.org/moment/-/moment-2.30.1.tgz", @@ -6709,6 +6491,7 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true, "license": "MIT" }, "node_modules/mz": { @@ -6878,30 +6661,6 @@ "node": "^10 || ^12 || >=14" } }, - "node_modules/node-addon-api": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz", - "integrity": "sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==" - }, - "node_modules/node-fetch": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", - "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", - "dependencies": { - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "peerDependencies": { - "encoding": 
"^0.1.0" - }, - "peerDependenciesMeta": { - "encoding": { - "optional": true - } - } - }, "node_modules/node-releases": { "version": "2.0.14", "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz", @@ -6909,20 +6668,6 @@ "dev": true, "license": "MIT" }, - "node_modules/nopt": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", - "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", - "dependencies": { - "abbrev": "1" - }, - "bin": { - "nopt": "bin/nopt.js" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/normalize-path": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", @@ -6942,18 +6687,6 @@ "node": ">=0.10.0" } }, - "node_modules/npmlog": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz", - "integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==", - "deprecated": "This package is no longer supported.", - "dependencies": { - "are-we-there-yet": "^2.0.0", - "console-control-strings": "^1.1.0", - "gauge": "^3.0.0", - "set-blocking": "^2.0.0" - } - }, "node_modules/oauth": { "version": "0.9.15", "resolved": "https://registry.npmjs.org/oauth/-/oauth-0.9.15.tgz", @@ -7115,6 +6848,7 @@ "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, "license": "ISC", "dependencies": { "wrappy": "1" @@ -7249,6 +6983,7 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, "license": "MIT", "engines": { "node": ">=0.10.0" @@ -7990,19 +7725,6 @@ "pify": "^2.3.0" } }, - "node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/readdirp": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", @@ -8164,6 +7886,7 @@ "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", "deprecated": "Rimraf versions prior to v4 are no longer supported", + "dev": true, "license": "ISC", "dependencies": { "glob": "^7.1.3" @@ -8180,6 +7903,7 @@ "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, "license": "ISC", "dependencies": { "fs.realpath": "^1.0.0", @@ -8242,6 +7966,7 @@ "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, "funding": [ { "type": "github", @@ -8362,6 +8087,7 @@ "version": "7.6.2", "resolved": 
"https://registry.npmjs.org/semver/-/semver-7.6.2.tgz", "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==", + "dev": true, "license": "ISC", "bin": { "semver": "bin/semver.js" @@ -8380,11 +8106,6 @@ "randombytes": "^2.1.0" } }, - "node_modules/set-blocking": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==" - }, "node_modules/set-function-length": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", @@ -8534,14 +8255,6 @@ "node": ">=10.0.0" } }, - "node_modules/string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "dependencies": { - "safe-buffer": "~5.2.0" - } - }, "node_modules/string-width": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.1.0.tgz", @@ -8981,30 +8694,6 @@ "node": ">=6" } }, - "node_modules/tar": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", - "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", - "dependencies": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/tar/node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "engines": { - "node": ">=8" - } - }, "node_modules/terser": { "version": "5.31.0", "resolved": "https://registry.npmjs.org/terser/-/terser-5.31.0.tgz", @@ -9140,11 +8829,6 @@ "node": ">=8.0" } }, - "node_modules/tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" - }, "node_modules/trim-repeated": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/trim-repeated/-/trim-repeated-1.0.0.tgz", @@ -9541,11 +9225,6 @@ "node": ">=10.13.0" } }, - "node_modules/webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" - }, "node_modules/webpack": { "version": "5.91.0", "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.91.0.tgz", @@ -9647,15 +9326,6 @@ "url": "https://opencollective.com/webpack" } }, - "node_modules/whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "dependencies": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", @@ -9754,32 +9424,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/wide-align": { - "version": "1.1.5", - 
"resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", - "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", - "dependencies": { - "string-width": "^1.0.2 || 2 || 3 || 4" - } - }, - "node_modules/wide-align/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, - "node_modules/wide-align/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/word-wrap": { "version": "1.2.5", "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", @@ -9905,6 +9549,7 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true, "license": "ISC" }, "node_modules/y18n": { diff --git a/ui/package.json b/ui/package.json index b71d6dca77..ed76a188a8 100644 --- a/ui/package.json +++ b/ui/package.json @@ -32,7 +32,6 @@ "@types/node": "^20.14.0", "@types/react": "^18.3.3", "@types/react-dom": "^18.3.0", - "bcrypt": "^5.1.1", "classnames": "^2.5.1", "long": "^5.2.3", "lucide-react": "^0.383.0", @@ -54,7 +53,6 @@ "zod": "^3.23.8" }, "devDependencies": { - "@types/bcrypt": "^5.0.2", "autoprefixer": "^10.4.19", "copy-webpack-plugin": "^12.0.2", "eslint": "^8.57.0", diff --git a/ui/rpc/http.ts b/ui/rpc/http.ts index 812521b9bb..786258f5c8 100644 --- a/ui/rpc/http.ts +++ b/ui/rpc/http.ts @@ -1,4 +1,4 @@ -import { GetAPIToken } from '@/rpc/token'; +import { GetAuthorizationHeader } from '@/rpc/token'; export function GetFlowHttpAddressFromEnv() { return process.env.PEERDB_FLOW_SERVER_HTTP!; @@ -13,9 +13,9 @@ function handleResponse(res: Response) { class Client { baseUrl: string; - headers: { [key: string]: any }; + headers: Headers; - constructor(baseUrl: string, headers: { [key: string]: any }) { + constructor(baseUrl: string, headers: Headers) { this.baseUrl = baseUrl; this.headers = headers; } @@ -24,22 +24,30 @@ class Client { return fetch(this.baseUrl + path, { headers: this.headers, ...options }); } - get(path: string, options?: { [key: string]: any }) { - return this.raw(path, options).then(handleResponse); + async get(path: string, options?: { [key: string]: any }) { + const res = await this.raw(path, options); + return handleResponse(res); } - post(path: string, options?: { [key: string]: any }) { - return this.raw(path, { + async post(path: string, options?: { [key: string]: any }) { + const res = await this.raw(path, { method: 'POST', ...options, - }).then(handleResponse); + }); + return handleResponse(res); } } -const flowServiceHttpClient = new Client(GetFlowHttpAddressFromEnv(), { +const flowServiceHeaders = new Headers({ 'Content-Type': 'application/json', - Authorization: `Bearer ${GetAPIToken()}`, }); +if (GetAuthorizationHeader()) { + flowServiceHeaders.set('Authorization', GetAuthorizationHeader()); +} +const flowServiceHttpClient = new Client( + GetFlowHttpAddressFromEnv(), + flowServiceHeaders +); export 
function GetFlowServiceHttpClient() { return flowServiceHttpClient; diff --git a/ui/rpc/token.ts b/ui/rpc/token.ts index 04408cd183..85e22bd8fe 100644 --- a/ui/rpc/token.ts +++ b/ui/rpc/token.ts @@ -1,12 +1,9 @@ -import bcrypt from 'bcrypt'; import 'server-only'; -function hashPassword(password: string, rounds: number) { - return bcrypt.hashSync(password, rounds); -} - -export function GetAPIToken() { - const password = process.env.PEERDB_PASSWORD ?? ''; - const hashedPassword = hashPassword(password, 10); - return Buffer.from(hashedPassword).toString('base64'); +export function GetAuthorizationHeader() { + const password = process.env.PEERDB_PASSWORD; + if (!password) { + return ''; + } + return `Bearer ${Buffer.from(password).toString('base64')}`; } From dabb3764006ebebe571a45e3a3ee0100459e504d Mon Sep 17 00:00:00 2001 From: Kevin Biju Date: Tue, 11 Jun 2024 23:12:36 +0530 Subject: [PATCH 30/31] fix review feedback pt.1 --- flow/go.mod | 1 - flow/go.sum | 2 -- flow/middleware/auth.go | 27 +-------------------------- flow/middleware/interceptors.go | 11 ++++------- 4 files changed, 5 insertions(+), 36 deletions(-) diff --git a/flow/go.mod b/flow/go.mod index 404a6924f9..f16ccf1f1c 100644 --- a/flow/go.mod +++ b/flow/go.mod @@ -103,7 +103,6 @@ require ( github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 // indirect github.com/gorilla/websocket v1.5.1 // indirect github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c // indirect - github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect github.com/kr/pretty v0.3.1 // indirect github.com/kr/text v0.2.0 // indirect github.com/mtibben/percent v0.2.1 // indirect diff --git a/flow/go.sum b/flow/go.sum index 102f58caa8..5b8b6c3a07 100644 --- a/flow/go.sum +++ b/flow/go.sum @@ -268,8 +268,6 @@ github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0 h1:bkypFPDjIYGfCYD5mRBvpqxfYX1 github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0/go.mod h1:P+Lt/0by1T8bfcF3z737NnSbmxQAppXMRziHUxPOC8k= github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c h1:6rhixN/i8ZofjG1Y75iExal34USq5p+wiN1tpie8IrU= github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c/go.mod h1:NMPJylDgVpX0MLRlPy15sqSwOFv/U1GZ2m21JhFfek0= -github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= -github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= github.com/jackc/pgerrcode v0.0.0-20240316143900-6e2875d9b438 h1:Dj0L5fhJ9F82ZJyVOmBx6msDp/kfd1t9GRfny/mfJA0= diff --git a/flow/middleware/auth.go b/flow/middleware/auth.go index 499c9f884a..51729b4a83 100644 --- a/flow/middleware/auth.go +++ b/flow/middleware/auth.go @@ -6,20 +6,16 @@ import ( "log/slog" "strings" - "github.com/hashicorp/golang-lru/v2/expirable" "golang.org/x/crypto/bcrypt" "google.golang.org/grpc/codes" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" "github.com/PeerDB-io/peer-flow/logger" - "github.com/PeerDB-io/peer-flow/peerdbenv" ) -const hashedKey = "sognodivolare" - // authorize checks the authorization metadata and compares the incoming bearer token with the plaintext -func authorize(ctx context.Context, hashCache *expirable.LRU[string, []byte]) (context.Context, error) { +func authorize(ctx context.Context, hash []byte) (context.Context, error) { md, _ := metadata.FromIncomingContext(ctx) if len(md["authorization"]) == 0 { 
return nil, status.Errorf(codes.Unauthenticated, "Authorization token is required") @@ -38,30 +34,9 @@ func authorize(ctx context.Context, hashCache *expirable.LRU[string, []byte]) (c return nil, status.Errorf(codes.Unauthenticated, "Authentication failed") } - hash, err := getCachedHash(hashCache) - if err != nil || hash == nil { - logger.LoggerFromCtx(ctx).Warn("Error getting hash", slog.Any("error", err)) - return nil, status.Errorf(codes.Unauthenticated, "Authentication failed") - } if err := bcrypt.CompareHashAndPassword(hash, tokenBytes); err != nil { logger.LoggerFromCtx(ctx).Warn("Error validating token", slog.String("token", string(tokenBytes)), slog.Any("error", err)) return nil, status.Errorf(codes.Unauthenticated, "Authentication failed") } return ctx, nil } - -func getCachedHash(hashCache *expirable.LRU[string, []byte]) ([]byte, error) { - if value, ok := hashCache.Get(hashedKey); ok { - return value, nil - } - plaintext := peerdbenv.PeerDBPassword() - if plaintext == "" { - return nil, nil - } - hashedPassword, err := bcrypt.GenerateFromPassword([]byte(plaintext), bcrypt.DefaultCost) - if err != nil { - return nil, err - } - hashCache.Add(hashedKey, hashedPassword) - return hashedPassword, nil -} diff --git a/flow/middleware/interceptors.go b/flow/middleware/interceptors.go index 8b06f85ca7..4df174d1d3 100644 --- a/flow/middleware/interceptors.go +++ b/flow/middleware/interceptors.go @@ -2,11 +2,10 @@ package middleware import ( "context" - "fmt" "log/slog" "time" - "github.com/hashicorp/golang-lru/v2/expirable" + "golang.org/x/crypto/bcrypt" "google.golang.org/grpc" "google.golang.org/grpc/peer" "google.golang.org/grpc/status" @@ -30,18 +29,16 @@ func CreateAuthServerInterceptor(ctx context.Context, unauthenticatedMethods []s unauthenticatedMethodsSet[method] = struct{}{} } - // accommodate live password changes and reduce time hash is in memory - hashCache := expirable.NewLRU[string, []byte](1, nil, 10*time.Minute) - _, err := getCachedHash(hashCache) + hash, err := bcrypt.GenerateFromPassword([]byte(plaintext), bcrypt.DefaultCost) if err != nil { - return nil, fmt.Errorf("error generating hash: %w", err) + return nil, err } return func(ctx context.Context, req any, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (any, error) { if _, ok := unauthenticatedMethodsSet[info.FullMethod]; ok { return handler(ctx, req) } - ctx, err := authorize(ctx, hashCache) + ctx, err := authorize(ctx, hash) if err != nil { return nil, err } From b14d2ae1eebc916fd6b96ba89679883fc8863ff7 Mon Sep 17 00:00:00 2001 From: Kevin Biju Date: Wed, 12 Jun 2024 18:07:27 +0530 Subject: [PATCH 31/31] remove bad logs --- flow/middleware/auth.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/flow/middleware/auth.go b/flow/middleware/auth.go index 51729b4a83..0f26eafd36 100644 --- a/flow/middleware/auth.go +++ b/flow/middleware/auth.go @@ -30,12 +30,12 @@ func authorize(ctx context.Context, hash []byte) (context.Context, error) { // Always a good practice to have the actual token in base64 tokenBytes, err := base64.StdEncoding.DecodeString(base64Token) if err != nil { - logger.LoggerFromCtx(ctx).Warn("Error decoding token", slog.String("token", base64Token), slog.Any("error", err)) + logger.LoggerFromCtx(ctx).Warn("Error decoding token", slog.Any("error", err)) return nil, status.Errorf(codes.Unauthenticated, "Authentication failed") } if err := bcrypt.CompareHashAndPassword(hash, tokenBytes); err != nil { - logger.LoggerFromCtx(ctx).Warn("Error validating token", 
slog.String("token", string(tokenBytes)), slog.Any("error", err)) + logger.LoggerFromCtx(ctx).Warn("Error validating token", slog.Any("error", err)) return nil, status.Errorf(codes.Unauthenticated, "Authentication failed") } return ctx, nil