Add cache for SVG generated by graphviz
Fixes #197 and makes rendering work on very large graphs.
1 parent 4ec0239 · commit ec56990
Showing 10 changed files with 258 additions and 91 deletions.
@@ -0,0 +1,135 @@
import {
  RenderRequest,
  RenderResponse,
  RenderResult,
  VizWorkerHash,
  VizWorkerSource,
  // eslint-disable-next-line import/no-unresolved
} from '../../worker/voyager.worker';
import { computeHash } from '../utils/compute-hash';
import { LocalStorageLRUCache } from '../utils/local-storage-lru-cache';

export class VizWorker {
  // LRU cache of gzip-compressed SVGs, persisted in localStorage.
  private _cache = new LocalStorageLRUCache({
    localStorageKey: 'VoyagerSVGCache',
    maxSize: 10,
  });
  private _worker: Worker;
  private _listeners: Array<(result: RenderResult) => void> = [];

  constructor() {
    // Spawn the graphviz rendering worker from its inlined source.
    const blob = new Blob([VizWorkerSource], {
      type: 'application/javascript',
    });
    const url = URL.createObjectURL(blob);

    this._worker = new Worker(url, { name: 'graphql-voyager-worker' });
    this._worker.addEventListener('message', (event) => {
      const { id, result } = event.data as RenderResponse;

      this._listeners[id](result);
      delete this._listeners[id];
    });
  }

  async renderString(dot: string): Promise<string> {
    // The key includes the worker hash, so entries produced by an older
    // worker build are never reused.
    const dotHash = await computeHash(dot);
    const cacheKey = `worker:${VizWorkerHash}:dot:${dotHash}`;

    try {
      const cachedSVG = this._cache.get(cacheKey);
      if (cachedSVG != null) {
        console.log('SVG loaded from cache');
        return decompressFromDataURL(cachedSVG);
      }
    } catch (err) {
      console.warn('Cannot read graphql-voyager cache: ', err);
    }

    console.time('Rendering SVG');
    const svg = await this._renderString(dot);
    console.timeEnd('Rendering SVG');

    try {
      this._cache.set(cacheKey, await compressToDataURL(svg));
    } catch (err) {
      console.warn('Cannot write graphql-voyager cache: ', err);
    }
    return svg;
  }

  _renderString(src: string): Promise<string> {
    return new Promise((resolve, reject) => {
      const id = this._listeners.length;

      this._listeners.push(function (result): void {
        if ('error' in result) {
          const { error } = result;
          const e = new Error(error.message);
          if (error.fileName) (e as any).fileName = error.fileName;
          if (error.lineNumber) (e as any).lineNumber = error.lineNumber;
          if (error.stack) (e as any).stack = error.stack;
          return reject(e);
        }
        resolve(result.value);
      });

      const renderRequest: RenderRequest = { id, src };
      this._worker.postMessage(renderRequest);
    });
  }
}

async function decompressFromDataURL(dataURL: string): Promise<string> {
  const response = await fetch(dataURL);
  const blob = await response.blob();
  switch (blob.type) {
    case 'application/gzip': {
      // @ts-expect-error DecompressionStream is missing from DOM types
      const stream = blob.stream().pipeThrough(new DecompressionStream('gzip'));
      const decompressedBlob = await streamToBlob(stream, 'text/plain');
      return decompressedBlob.text();
    }
    case 'text/plain':
      // Fallback produced by compressToDataURL when compression failed.
      return blob.text();
    default:
      throw new Error('Cannot convert data URL with MIME type: ' + blob.type);
  }
}

async function compressToDataURL(str: string): Promise<string> {
  try {
    const blob = new Blob([str], { type: 'text/plain' });
    // @ts-expect-error CompressionStream is missing from DOM types
    const stream = blob.stream().pipeThrough(new CompressionStream('gzip'));
    const compressedBlob = await streamToBlob(stream, 'application/gzip');
    return blobToDataURL(compressedBlob);
  } catch (err) {
    console.warn('Cannot compress string: ', err);
    return `data:text/plain;charset=utf-8,${encodeURIComponent(str)}`;
  }
}

function blobToDataURL(blob: Blob): Promise<string> {
  const fileReader = new FileReader();

  return new Promise((resolve, reject) => {
    try {
      fileReader.onload = function (event) {
        // eslint-disable-next-line @typescript-eslint/no-base-to-string
        const dataURL = event.target!.result!.toString();
        resolve(dataURL);
      };
      fileReader.readAsDataURL(blob);
    } catch (err) {
      reject(err);
    }
  });
}

function streamToBlob(stream: ReadableStream, mimeType: string): Promise<Blob> {
  const response = new Response(stream, {
    headers: { 'Content-Type': mimeType },
  });
  return response.blob();
}
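For context, here is a minimal usage sketch of the wrapper above. The import path and the schema-to-DOT step are assumptions and not part of this commit; only `new VizWorker()` and `renderString(dot)` come from the diff itself.

```ts
// Hypothetical caller — import path and DOT source are assumptions.
import { VizWorker } from './viz-worker';

const viz = new VizWorker();

async function renderGraph(dot: string): Promise<SVGElement> {
  // First call renders in the worker, compresses the SVG, and stores it under
  // `worker:<VizWorkerHash>:dot:<sha256(dot)>`; repeat calls with the same DOT
  // (and the same worker build) are served from localStorage.
  const svgString = await viz.renderString(dot);
  const doc = new DOMParser().parseFromString(svgString, 'image/svg+xml');
  return doc.documentElement as unknown as SVGElement;
}
```

Hashing the DOT source instead of storing it verbatim in the key keeps the localStorage keys short even for very large graphs.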
@@ -0,0 +1,13 @@
const textEncoder = new TextEncoder();

// Returns the SHA-256 digest of `str` as a lowercase hex string.
export async function computeHash(str: string): Promise<string> {
  const data = textEncoder.encode(str);
  const hashBuffer = await crypto.subtle.digest('SHA-256', data);

  const hashArray = Array.from(new Uint8Array(hashBuffer));
  const hashHex = hashArray
    .map((b) => b.toString(16).padStart(2, '0'))
    .join('');

  return hashHex;
}
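A small sketch of how the helper behaves; the DOT snippet is made up, and note that `crypto.subtle` is only available in secure contexts (HTTPS or localhost), which is an implicit requirement of this cache.

```ts
import { computeHash } from './compute-hash'; // path assumed

async function demoComputeHash(): Promise<void> {
  const a = await computeHash('digraph { a -> b }');
  const b = await computeHash('digraph { a -> b }');
  console.log(a === b); // true — identical input, identical digest
  console.log(a.length); // 64 hex characters (SHA-256 = 32 bytes)
}
```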
@@ -0,0 +1,56 @@
export class LocalStorageLRUCache {
  private _localStorageKey: string;
  private _maxSize: number;

  constructor(options: { localStorageKey: string; maxSize: number }) {
    this._localStorageKey = options.localStorageKey;
    this._maxSize = options.maxSize;
  }

  public set(key: string, value: string): void {
    // Re-inserting the key moves it to the most-recently-used position.
    const lru = this.readLRU();
    lru.delete(key);
    lru.set(key, value);
    this.writeLRU(lru);
  }

  public get(key: string): string | null {
    const lru = this.readLRU();
    const cachedValue = lru.get(key);
    if (cachedValue === undefined) {
      return null;
    }

    // Touch the entry so it survives eviction longer.
    lru.delete(key);
    lru.set(key, cachedValue);
    this.writeLRU(lru);
    return cachedValue;
  }

  private readLRU(): Map<string, string> {
    const rawData = localStorage.getItem(this._localStorageKey);
    const data = JSON.parse(rawData ?? '[]');
    return new Map(Array.isArray(data) ? data : []);
  }

  private writeLRU(lru: Map<string, string>): void {
    // Keep only the most recent entries; if localStorage rejects the write
    // (e.g. quota exceeded), retry with progressively fewer entries.
    let maxSize = this._maxSize;
    for (;;) {
      try {
        const trimmedPairs = Array.from(lru).slice(-maxSize);
        const rawData = JSON.stringify(trimmedPairs);
        localStorage.setItem(this._localStorageKey, rawData);
        this._maxSize = maxSize;
        break;
      } catch (error) {
        if (maxSize <= 1) {
          throw error;
        }
        console.warn(
          `Can't write LRU cache with ${maxSize} entries. Retrying...`,
        );
        maxSize -= 1;
      }
    }
  }
}
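To illustrate the eviction order, a small sketch; the key names and `maxSize` are arbitrary, and it assumes a browser context where `localStorage` exists.

```ts
import { LocalStorageLRUCache } from './local-storage-lru-cache'; // path assumed

const cache = new LocalStorageLRUCache({
  localStorageKey: 'DemoCache',
  maxSize: 2,
});

cache.set('a', '1');
cache.set('b', '2');
cache.get('a'); // touching 'a' makes it the most recently used entry
cache.set('c', '3'); // write trims to the last 2 entries, dropping 'b'
console.log(cache.get('b')); // null — evicted
console.log(cache.get('a')); // '1' — still present
```

The fixed entry count (10 SVGs in `VizWorker`) together with the quota-driven shrink in `writeLRU` keeps the cache within localStorage's limited quota even when individual SVGs are large.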