Commit

ok actually it wasn't working and still isn't
theosanderson committed Oct 8, 2024
1 parent 8e3ec29 commit 7f81021
Showing 2 changed files with 9 additions and 7 deletions.
taxonium_component/src/webworkers/localBackendWorker.js (4 additions, 2 deletions)
@@ -7,7 +7,8 @@ import {
 import { processNewickAndMetadata } from "../utils/processNewick.js";
 import { processNextstrain } from "../utils/processNextstrain.js";
 import { ReadableWebToNodeStream } from "readable-web-to-node-stream";
-import StreamValues from 'stream-json/streamers/StreamValues';
+import {parser } from "stream-json";
+import {streamValues } from "stream-json/streamers/StreamValues";
 
 console.log("worker starting");
 postMessage({ data: "Worker starting" });
@@ -213,8 +214,9 @@ onmessage = async (event) => {
 data.data,
 sendStatusMessage,
 ReadableWebToNodeStream,
-StreamValues
+parser,streamValues
 );
+
 console.log("processedUploadedData created");
 } else if (
 data.type === "upload" &&
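
The arguments above line up with the processJsonl signature updated below in taxonium_data_handling/importing.js, so the worker-side call is presumably processJsonl. A minimal sketch of the new wiring, assuming that call; the handleJsonlUpload wrapper and the import specifier are illustrative, not part of the commit:

// Sketch only: the worker now imports the concrete stream-json functions
// and forwards them instead of passing the whole StreamValues module.
import { parser } from "stream-json";
import { streamValues } from "stream-json/streamers/StreamValues";
import { ReadableWebToNodeStream } from "readable-web-to-node-stream";
import { processJsonl } from "taxonium_data_handling/importing.js"; // illustrative specifier

async function handleJsonlUpload(data, sendStatusMessage) {
  const processedUploadedData = await processJsonl(
    data.data,
    sendStatusMessage,
    ReadableWebToNodeStream,
    parser,
    streamValues
  );
  console.log("processedUploadedData created");
  return processedUploadedData;
}
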
taxonium_data_handling/importing.js (5 additions, 5 deletions)
@@ -65,13 +65,13 @@ function reduceMaxOrMin(array, accessFunction, maxOrMin) {
 }
 }
 
-export const setUpStream = (the_stream, data, sendStatusMessage, StreamValues) => {
+export const setUpStream = (the_stream, data, sendStatusMessage, parser, streamValues) => {
 const splitter = new StreamSplitter();
 
 // Custom header parser using json-stream
 const headerParser = stream.pipeline(
-StreamValues.parser({ jsonStreaming: true }),
-StreamValues.streamValues(),
+parser({ jsonStreaming: true }),
+streamValues(),
 new stream.Writable({
 objectMode: true,
 write(chunk, encoding, callback) {
@@ -162,7 +162,7 @@ export const setUpStream = (the_stream, data, sendStatusMessage, StreamValues) =
 export const processJsonl = async (
 jsonl,
 sendStatusMessage,
-ReadableWebToNodeStream, StreamValues
+ReadableWebToNodeStream, parser, streamValues
 ) => {
 console.log(
 "Worker processJsonl" //, jsonl
@@ -178,7 +178,7 @@ export const processJsonl = async (
 the_stream = new stream.PassThrough();
 }
 let new_data = {};
-setUpStream(the_stream, new_data, sendStatusMessage,StreamValues);
+setUpStream(the_stream, new_data, sendStatusMessage,parser, streamValues);
 
 if (status === "loaded") {
 const dataAsArrayBuffer = data;
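For context on what setUpStream now receives: parser({ jsonStreaming: true }) from stream-json tokenises a stream of concatenated JSON values (as in JSONL input) and streamValues() reassembles the tokens into JavaScript objects, emitted as { key, value } pairs. A minimal standalone sketch of that pipeline; the writable sink and the sample input are illustrative, not taken from the repository:

// Sketch, not the repository's code: wire stream-json's parser and streamValues
// into a pipeline and feed it newline-delimited JSON.
import stream from "stream";
import { parser } from "stream-json";
import { streamValues } from "stream-json/streamers/StreamValues";

const source = parser({ jsonStreaming: true }); // tokenise; jsonStreaming allows many top-level values

stream.pipeline(
  source,
  streamValues(), // emit { key, value } for each complete top-level JSON value
  new stream.Writable({
    objectMode: true,
    write(chunk, encoding, callback) {
      console.log("parsed value:", chunk.value);
      callback();
    },
  }),
  (err) => {
    if (err) console.error("pipeline failed:", err);
  }
);

source.write('{"version": 1}\n');
source.write('{"node": {"name": "root"}}\n');
source.end();

Threading parser and streamValues through these signatures, rather than a StreamValues module object, appears intended to keep importing.js from importing stream-json itself: the worker supplies the two functions it has already resolved.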

