From 87e5d71baeaf23acb2f025e228ed550229649160 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Tue, 8 Oct 2024 17:24:20 +0000
Subject: [PATCH] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 .../src/webworkers/localBackendWorker.js |  9 +++---
 taxonium_data_handling/importing.js      | 28 +++++++++++--------
 2 files changed, 21 insertions(+), 16 deletions(-)

diff --git a/taxonium_component/src/webworkers/localBackendWorker.js b/taxonium_component/src/webworkers/localBackendWorker.js
index f0d86a59..36e030fe 100644
--- a/taxonium_component/src/webworkers/localBackendWorker.js
+++ b/taxonium_component/src/webworkers/localBackendWorker.js
@@ -7,8 +7,8 @@ import {
 import { processNewickAndMetadata } from "../utils/processNewick.js";
 import { processNextstrain } from "../utils/processNextstrain.js";
 import { ReadableWebToNodeStream } from "readable-web-to-node-stream";
-import {parser } from "stream-json";
-import {streamValues } from "stream-json/streamers/StreamValues";
+import { parser } from "stream-json";
+import { streamValues } from "stream-json/streamers/StreamValues";
 
 console.log("worker starting");
 postMessage({ data: "Worker starting" });
@@ -214,9 +214,10 @@ onmessage = async (event) => {
       data.data,
       sendStatusMessage,
       ReadableWebToNodeStream,
-      parser,streamValues
+      parser,
+      streamValues
     );
-    
+
     console.log("processedUploadedData created");
   } else if (
     data.type === "upload" &&
diff --git a/taxonium_data_handling/importing.js b/taxonium_data_handling/importing.js
index 9cbe3878..9ec72620 100644
--- a/taxonium_data_handling/importing.js
+++ b/taxonium_data_handling/importing.js
@@ -20,7 +20,6 @@ class StreamSplitter extends stream.Transform {
         const headerData = data.slice(0, newlineIndex);
         const restData = data.slice(newlineIndex + 1);
-
 
         // Write header data to headerParser
         this.headerParser.write(headerData);
         this.headerParser.end();
@@ -33,7 +32,7 @@
         this.firstPart = false;
       } else {
         // No newline found, store data in buffer
-        this.headerParser.write(data); 
+        this.headerParser.write(data);
       }
     } else {
       // After header is processed, pass data to dataParser
@@ -55,7 +54,6 @@
   }
 }
 
-
 const roundToDp = (number, dp) => {
   return Math.round(number * Math.pow(10, dp)) / Math.pow(10, dp);
 };
@@ -92,21 +90,24 @@ export const setUpStream = (
   // Header parser
   const headerParser = parser({ jsonStreaming: true });
   const headerPipeline = headerParser.pipe(streamValues());
-  
+
   let headerBytesProcessed = 0;
   const HEADER_PROGRESS_INTERVAL = 1024 * 1024; // 1MB
 
-  headerParser.on('data', (chunk) => {
+  headerParser.on("data", (chunk) => {
     headerBytesProcessed += chunk.length;
     if (headerBytesProcessed >= HEADER_PROGRESS_INTERVAL) {
       sendStatusMessage({
-        message: `Processing header: ${(headerBytesProcessed / (1024 * 1024)).toFixed(2)} MB processed`,
+        message: `Processing header: ${(
+          headerBytesProcessed /
+          (1024 * 1024)
+        ).toFixed(2)} MB processed`,
       });
       headerBytesProcessed = 0; // Reset the counter
     }
   });
 
-  headerPipeline.on('data', (chunk) => {
+  headerPipeline.on("data", (chunk) => {
     data.header = chunk.value;
     data.nodes = [];
     data.node_to_mut = {};
@@ -115,7 +116,7 @@
     });
   });
 
-  headerPipeline.on('error', (err) => {
+  headerPipeline.on("error", (err) => {
     console.error("Header parser error:", err);
     sendStatusMessage({
       error: `Header parser error: ${err.message}`,
@@ -177,7 +178,9 @@ export const setUpStream = (
   const splitter = new StreamSplitter(headerParser, dataParser);
 
   // Pipe the input stream through the splitter
-  the_stream.pipe(splitter).on("error", (err) => console.error("Splitter error:", err));
+  the_stream
+    .pipe(splitter)
+    .on("error", (err) => console.error("Splitter error:", err));
 
   // Handle the completion of the dataParser
   dataParser.on("finish", () => {
@@ -185,11 +188,12 @@
   });
 };
 
-
 export const processJsonl = async (
   jsonl,
   sendStatusMessage,
-  ReadableWebToNodeStream, parser, streamValues
+  ReadableWebToNodeStream,
+  parser,
+  streamValues
 ) => {
   console.log(
     "Worker processJsonl" //, jsonl
@@ -205,7 +209,7 @@ export const processJsonl = async (
     the_stream = new stream.PassThrough();
   }
   let new_data = {};
-  setUpStream(the_stream, new_data, sendStatusMessage,parser, streamValues);
+  setUpStream(the_stream, new_data, sendStatusMessage, parser, streamValues);
 
   if (status === "loaded") {
     const dataAsArrayBuffer = data;
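
For context: the setUpStream code this patch reformats builds on stream-json's
JSON-streaming mode. Below is a minimal standalone sketch of that pattern,
assuming only the parser and streamValues imports the patch already touches;
the variable names and JSONL literals are illustrative, not part of the patch.

    import { parser } from "stream-json";
    import { streamValues } from "stream-json/streamers/StreamValues";

    // jsonStreaming: true makes the parser accept a sequence of
    // concatenated JSON values (e.g. JSONL) instead of one document.
    const jsonlParser = parser({ jsonStreaming: true });
    const values = jsonlParser.pipe(streamValues());

    // streamValues() emits each completed value as { key, value }.
    values.on("data", ({ value }) => {
      console.log("parsed value:", value);
    });
    values.on("error", (err) => console.error("parse error:", err.message));

    // Feed JSONL: the first value acts as the header and later values as
    // nodes, which is exactly the split StreamSplitter performs above.
    jsonlParser.write('{"version": 1}\n');
    jsonlParser.write('{"name": "node0"}\n');
    jsonlParser.end();

This is also why processJsonl now threads parser and streamValues through to
setUpStream as arguments: the worker imports them once and passes them down,
so the data-handling module itself stays free of a direct stream-json import.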