Skip to content
This repository has been archived by the owner on Jan 2, 2025. It is now read-only.

Commit

Permalink
Use memory-level for StackBlitz compatibility (#76)
Browse files Browse the repository at this point in the history
  • Loading branch information
seveibar authored Jul 9, 2024
1 parent ca3997d commit aba4cbe
Show file tree
Hide file tree
Showing 7 changed files with 138 additions and 9 deletions.
Binary file modified bun.lockb
Binary file not shown.
Binary file modified dev-server-api/bun.lockb
Binary file not shown.
2 changes: 1 addition & 1 deletion dev-server-api/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"typescript": "^5.0.0"
},
"dependencies": {
"level": "^8.0.1",
"memory-level": "^1.0.0",
"redaxios": "^0.5.1",
"winterspec": "0.0.81",
"zod": "^3.22.4"
Expand Down
2 changes: 2 additions & 0 deletions dev-server-api/routes/api/export_files/create.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,12 +16,14 @@ export default withWinterSpec({
}),
auth: "none",
})(async (req, ctx) => {
console.log("putting file")
const export_file = await ctx.db.put("export_file", {
export_request_id: req.jsonBody.export_request_id,
file_name: req.jsonBody.file_name,
file_content_base64: req.jsonBody.file_content_base64,
created_at: new Date().toISOString(),
})
console.log("done putting file")

return ctx.json({
export_file,
Expand Down
126 changes: 126 additions & 0 deletions dev-server-api/src/db/generic-json-level.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,126 @@
import {
AbstractLevel,
AbstractIterator,
AbstractKeyIterator,
AbstractValueIterator,
} from "abstract-level"
import { promises as fs } from "fs"
import path from "path"

/** Options bag reserved for future configuration; currently empty. */
interface JSONLevelOptions {}

/**
 * This is not totally tested yet, but is basically a NodeJS level that doesn't
 * use C-bindings. It could be used as a replacement for memory-level to store
 * to disk. Each key is persisted as `<location>/<key>.json`.
 *
 * NOTE(review): keys are used verbatim as file names — a key containing a
 * path separator would escape `location`; confirm keys are flat identifiers.
 */
class GenericJsonLevel extends AbstractLevel<string, any> {
  private location: string

  constructor(location: string, options?: JSONLevelOptions) {
    super({ ...options, encodings: { utf8: true } })
    this.location = location
  }

  /** Create the backing directory before first use. */
  async _open(): Promise<void> {
    await fs.mkdir(this.location, { recursive: true })
  }

  /** Resolve the on-disk file that backs `key`. */
  private filePathFor(key: string): string {
    return path.join(this.location, `${key}.json`)
  }

  async _put(key: string, value: any): Promise<void> {
    // Removed debug console.log that ran on every write.
    await fs.writeFile(this.filePathFor(key), JSON.stringify(value))
  }

  async _get(key: string): Promise<any> {
    try {
      const data = await fs.readFile(this.filePathFor(key), "utf8")
      return JSON.parse(data)
    } catch (error) {
      if ((error as NodeJS.ErrnoException).code === "ENOENT") {
        // abstract-level consumers detect missing keys via error.code,
        // so attach LEVEL_NOT_FOUND while keeping the original message.
        const notFound = new Error("NotFoundError") as Error & { code?: string }
        notFound.code = "LEVEL_NOT_FOUND"
        throw notFound
      }
      throw error
    }
  }

  async _del(key: string): Promise<void> {
    try {
      await fs.unlink(this.filePathFor(key))
    } catch (error) {
      // Deleting a missing key is a no-op, matching level semantics.
      if ((error as NodeJS.ErrnoException).code !== "ENOENT") {
        throw error
      }
    }
  }

  async _batch(
    operations: Array<{ type: "put" | "del"; key: string; value?: any }>
  ): Promise<void> {
    // NOTE(review): not atomic — a failure mid-batch leaves earlier
    // operations applied.
    for (const op of operations) {
      if (op.type === "put") {
        await this._put(op.key, op.value)
      } else if (op.type === "del") {
        await this._del(op.key)
      }
    }
  }

  /** Remove every persisted entry (all .json files under `location`). */
  async _clear(): Promise<void> {
    const files = await fs.readdir(this.location)
    for (const file of files) {
      if (file.endsWith(".json")) {
        await fs.unlink(path.join(this.location, file))
      }
    }
  }

  _iterator(): AbstractIterator<this, string, any> {
    // Capture `location` in a closure: inside the returned object literal's
    // shorthand methods, `this` is the literal itself, so the previous
    // `this.db.location` was a guaranteed runtime TypeError.
    const location = this.location
    let files: string[] = []
    let loaded = false
    let index = 0

    // List the directory exactly once, lazily; sorted so iteration order
    // is deterministic (fs.readdir order is platform-dependent).
    const ensureLoaded = async (): Promise<void> => {
      if (!loaded) {
        files = (await fs.readdir(location))
          .filter((file) => file.endsWith(".json"))
          .sort()
        loaded = true
      }
    }

    // Read the next backing file, or null when iteration is exhausted.
    const nextFile = async (): Promise<{ key: string; value: any } | null> => {
      if (index >= files.length) {
        return null
      }
      const file = files[index++]
      const key = path.basename(file, ".json")
      const data = await fs.readFile(path.join(location, file), "utf8")
      return { key, value: JSON.parse(data) }
    }

    return {
      async next() {
        await ensureLoaded()
        const entry = await nextFile()
        if (entry === null) {
          return undefined
        }
        return [entry.key, entry.value]
      },
      async seek(target: string) {
        await ensureLoaded()
        index = files.findIndex((file) => file.startsWith(target))
        if (index === -1) {
          index = files.length
        }
      },
      async end() {
        // No resources to clean up
      },
    }
  }
}

export { GenericJsonLevel }
15 changes: 8 additions & 7 deletions dev-server-api/src/db/zod-level-db.ts
Original file line number Diff line number Diff line change
@@ -1,13 +1,15 @@
import { Level } from "level"
// import { Level } from "level"
// import { GenericJsonLevel } from "./generic-json-level"
import { MemoryLevel } from "memory-level"
import { z } from "zod"
import { DBSchema, type DBSchemaType, type DBInputSchemaType } from "./schema"

// Create a wrapper class for Level with Zod validation
export class ZodLevelDatabase {
private db: Level<string, any>
private db: MemoryLevel<string, any>

constructor(location: string) {
this.db = new Level(location)
this.db = new MemoryLevel() // new GenericJsonLevel(location)
}

async open() {
Expand Down Expand Up @@ -35,11 +37,10 @@ export class ZodLevelDatabase {
const valueLoose: any = value
if (!valueLoose[idkey]) {
// generate an id using the "count" key
let count = await this.db
.get(`${collection}.count`, { valueEncoding: "json" })
.catch(() => 1)
let count = await this.db.get(`${collection}.count`).catch(() => 1)
if (typeof count === "string") count = parseInt(count)
;(value as any)[idkey] = count
await this.db.put(`${collection}.count`, count + 1)
await this.db.put(`${collection}.count`, (count + 1).toString())
}
const key = `${collection}:${valueLoose[idkey]}`
const validatedData = DBSchema.shape[collection].parse(value)
Expand Down
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -52,8 +52,8 @@
"ignore": "^5.3.1",
"json5": "^2.2.3",
"kleur": "^4.1.5",
"level": "^8.0.1",
"lodash": "^4.17.21",
"memory-level": "^1.0.0",
"mime-types": "^2.1.35",
"minimist": "^1.2.8",
"node-persist": "^4.0.1",
Expand Down

0 comments on commit aba4cbe

Please sign in to comment.