diff --git a/server/README.md b/server/README.md
index c403366..71ee303 100644
--- a/server/README.md
+++ b/server/README.md
@@ -34,3 +34,17 @@ You can check out [the Next.js GitHub repository](https://github.com/vercel/next
 The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js.
 
 Check out our [Next.js deployment documentation](https://nextjs.org/docs/deployment) for more details.
+
+
+## Deploy on Supabase
+
+There is also a database/auth component, which uses Supabase. It runs locally during development, and it can also run within the free tier on [supabase.com](https://supabase.com).
+
+If you add a migration locally, you can deploy that migration with `supabase db push`.
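+
+For example, assuming the Supabase CLI is installed and your local project has been linked (the project ref below is a placeholder):
+
+```bash
+supabase link --project-ref your-project-ref
+supabase db push
+```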
diff --git a/server/app/query/create/page.tsx b/server/app/query/create/page.tsx
index 494656d..7ba935b 100644
--- a/server/app/query/create/page.tsx
+++ b/server/app/query/create/page.tsx
@@ -15,8 +15,11 @@ import {
   IPARemoteServers,
   RemoteServersType,
 } from "@/app/query/servers";
-import NewQueryId from "@/app/query/haikunator";
 import { Branch, Branches, Commits } from "@/app/query/github";
+import { createNewQuery, Query } from "@/data/query";
+import { Database } from "@/data/supabaseTypes";
+
+type QueryType = Database["public"]["Enums"]["query_type"];
 
 export default function Page() {
   const [queryId, setQueryId] = useState<string | null>(null);
@@ -24,26 +27,29 @@
 
   const handleFormSubmit = async (
     event: FormEvent<HTMLFormElement>,
+    queryType: QueryType,
     remoteServers: RemoteServersType,
   ) => {
     event.preventDefault();
     try {
-      const newQueryId = NewQueryId();
-      setQueryId(newQueryId);
+      const params = new FormData(event.currentTarget);
+      const query: Query = await createNewQuery(params, queryType);
+
+      setQueryId(query.displayId);
+
       // Send a POST request to start the process
-      const formData = new FormData(event.currentTarget);
       for (const remoteServer of Object.values(remoteServers)) {
-        const response = await fetch(remoteServer.startURL(newQueryId), {
+        const response = await fetch(remoteServer.startURL(query.uuid), {
           method: "POST",
-          body: formData,
+          body: params,
         });
-        const data = await response.json();
+        const _data = await response.json(); // drain the response; result unused
       }
 
       await new Promise((f) => setTimeout(f, 1000));
 
       // Redirect to /query/view/
-      router.push(`/query/view/${newQueryId}`);
+      router.push(`/query/view/${query.displayId}`);
     } catch (error) {
       console.error("Error starting process:", error);
     }
@@ -52,11 +58,11 @@
   const handleDemoLogsFormSubmit = async (
     event: FormEvent<HTMLFormElement>,
   ) => {
-    await handleFormSubmit(event, DemoLoggerRemoteServers);
+    await handleFormSubmit(event, "DEMO_LOGGER", DemoLoggerRemoteServers);
   };
 
   const handleIPAFormSubmit = async (event: FormEvent<HTMLFormElement>) => {
-    await handleFormSubmit(event, IPARemoteServers);
+    await handleFormSubmit(event, "IPA", IPARemoteServers);
   };
 
   return (
diff --git a/server/app/query/haikunator.tsx b/server/app/query/haikunator.tsx
index 529cef7..61847a4 100644
--- a/server/app/query/haikunator.tsx
+++ b/server/app/query/haikunator.tsx
@@ -6,6 +6,21 @@ const haikunator = new Haikunator({
   adjectives: adjectives,
   nouns: nouns,
 });
+function getCurrentTimestamp() {
+  const now = new Date();
+  const year = now.getFullYear();
+  const month = (now.getMonth() + 1).toString().padStart(2, "0");
+  const day = now.getDate().toString().padStart(2, "0");
+  const hours = now.getHours().toString().padStart(2, "0");
+  const minutes = now.getMinutes().toString().padStart(2, "0");
+  return `${year}-${month}-${day}T${hours}${minutes}`;
+}
+
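+// A new query ID looks something like "misty-dream2024-06-07T1452": a
+// haiku-style name (no numeric token) plus a timestamp, so IDs rarely
+// collide and sort roughly by creation time.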
 export default function NewQueryId(): string {
-  return haikunator.haikunate();
+  return encodeURIComponent(
+    haikunator.haikunate({ tokenLength: 0 }) + getCurrentTimestamp(),
+  );
 }
diff --git a/server/app/query/servers.tsx b/server/app/query/servers.tsx
index d23a043..b35d3e4 100644
--- a/server/app/query/servers.tsx
+++ b/server/app/query/servers.tsx
@@ -13,6 +13,7 @@ export interface ServerLog {
 }
 
 export enum Status {
+  QUEUED = "QUEUED",
   STARTING = "STARTING",
   COMPILING = "COMPILING",
   WAITING_TO_START = "WAITING_TO_START",
diff --git a/server/app/query/view/[id]/components.tsx b/server/app/query/view/[id]/components.tsx
index 1d49102..187cac9 100644
--- a/server/app/query/view/[id]/components.tsx
+++ b/server/app/query/view/[id]/components.tsx
@@ -22,6 +22,7 @@ type StatusClassNameMixinsType = {
 };
 
 const StatusClassNameMixins: StatusClassNameMixinsType = {
+  QUEUED: "bg-slate-300 dark:bg-slate-700",
   STARTING: "bg-emerald-300 dark:bg-emerald-700 animate-pulse",
   COMPILING: "bg-emerald-300 dark:bg-emerald-700 animate-pulse",
   WAITING_TO_START: "bg-emerald-300 dark:bg-emerald-700 animate-pulse",
diff --git a/server/app/query/view/[id]/page.tsx b/server/app/query/view/[id]/page.tsx
index 461ec8b..8e17476 100644
--- a/server/app/query/view/[id]/page.tsx
+++ b/server/app/query/view/[id]/page.tsx
@@ -21,8 +21,10 @@ import {
   initialRunTimeByRemoteServer,
 } from "@/app/query/servers";
 import { StatsComponent } from "@/app/query/view/[id]/charts";
+import { getQuery, Query as QueryData } from "@/data/query";
 
 export default function Query({ params }: { params: { id: string } }) {
+  const [query, setQuery] = useState<QueryData | null>(null);
   // display controls
   const [logsHidden, setLogsHidden] = useState(true);
   const [statsHidden, setStatsHidden] = useState(true);
@@ -44,9 +46,13 @@
   }
 
   const kill = async (remoteServers: RemoteServersType) => {
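+    // The route carries the human-readable display ID, but the helper
+    // servers key their endpoints on the query UUID, so resolve it first.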
+    const query: QueryData = await getQuery(params.id);
+
     const fetchPromises = Object.values(remoteServers).map(
       async (remoteServer) => {
-        await fetch(remoteServer.killURL(params.id), {
+        await fetch(remoteServer.killURL(query.uuid), {
           method: "POST",
         });
       },
@@ -56,26 +62,39 @@
   };
 
   useEffect(() => {
-    let webSockets: WebSocket[] = [];
-    for (const remoteServer of Object.values(IPARemoteServers)) {
-      const loggingWs = remoteServer.openLogSocket(params.id, setLogs);
-      const statusWs = remoteServer.openStatusSocket(
-        params.id,
-        setStatusByRemoteServer,
-      );
-      const statsWs = remoteServer.openStatsSocket(
-        params.id,
-        setStatsByRemoteServer,
-        setRunTimeByRemoteServer,
-      );
-      webSockets = [...webSockets, loggingWs, statusWs, statsWs];
-    }
-
-    return () => {
-      for (const ws of webSockets) {
-        ws.close();
-      }
-    };
+    const webSockets: WebSocket[] = [];
+    let cancelled = false;
+
+    (async () => {
+      const query: QueryData = await getQuery(params.id);
+      setQuery(query);
+      if (cancelled) {
+        return;
+      }
+      for (const remoteServer of Object.values(IPARemoteServers)) {
+        const loggingWs = remoteServer.openLogSocket(query.uuid, setLogs);
+        const statusWs = remoteServer.openStatusSocket(
+          query.uuid,
+          setStatusByRemoteServer,
+        );
+        const statsWs = remoteServer.openStatsSocket(
+          query.uuid,
+          setStatsByRemoteServer,
+          setRunTimeByRemoteServer,
+        );
+        webSockets.push(loggingWs, statusWs, statsWs);
+      }
+    })();
+
+    // The cleanup must be returned synchronously from the effect itself;
+    // returning it from inside the async IIFE would silently discard it
+    // and leak the sockets on unmount.
+    return () => {
+      cancelled = true;
+      for (const ws of webSockets) {
+        ws.close();
+      }
+    };
   }, [params]);
 
   return (
diff --git a/server/data/query.ts b/server/data/query.ts
new file mode 100644
index 0000000..7703844
--- /dev/null
+++ b/server/data/query.ts
@@ -0,0 +1,123 @@
+"use server";
+
+import { notFound } from "next/navigation";
+import { cookies } from "next/headers";
+import { createServerClient } from "@supabase/ssr";
+import { Database, Json } from "@/data/supabaseTypes";
+import { Status } from "@/app/query/servers";
+import NewQueryId from "@/app/query/haikunator";
+
+type QueryRow = Database["public"]["Tables"]["queries"]["Row"];
+type QueryType = Database["public"]["Enums"]["query_type"];
+
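+// A query has two identifiers: `uuid`, the primary key that the helper
+// servers' APIs are keyed on, and `displayId`, the human-readable name
+// used in page URLs.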
+export interface Query {
+  uuid: string;
+  displayId: string;
+  type: QueryType;
+  status: Status;
+  params: Json;
+  createdAt: string;
+  startedAt: string | null;
+  endedAt: string | null;
+}
+
+function processQueryData(data: QueryRow): Query {
+  return {
+    uuid: data.uuid,
+    displayId: data.display_id,
+    type: data.type as QueryType,
+    status: data.status as Status,
+    params: data.params,
+    createdAt: data.created_at,
+    startedAt: data.started_at,
+    endedAt: data.ended_at,
+  };
+}
+
+export async function getQuery(displayId: string): Promise<Query> {
+  const cookieStore = cookies();
+
+  const supabase = createServerClient(
+    process.env.NEXT_PUBLIC_SUPABASE_URL!,
+    process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!,
+    {
+      cookies: {
+        get(name: string) {
+          return cookieStore.get(name)?.value;
+        },
+      },
+    },
+  );
+
+  const { status, data, error } = await supabase
+    .from("queries")
+    .select("*")
+    .eq("display_id", displayId)
+    .limit(1)
+    .maybeSingle();
+
+  if (error) {
+    console.error(error);
+    throw new Error(`Failed to load query ${displayId}: ${error.message}`);
+  } else if (status === 200) {
+    if (data) {
+      return processQueryData(data);
+    } else {
+      notFound();
+    }
+  }
+  throw new Error(`${displayId} not found.`);
+}
+
+export async function createNewQuery(
+  params: FormData,
+  queryType: QueryType,
+): Promise<Query> {
+  const json = JSON.stringify(Object.fromEntries(params));
+  const cookieStore = cookies();
+
+  const supabase = createServerClient(
+    process.env.NEXT_PUBLIC_SUPABASE_URL!,
+    process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!,
+    {
+      cookies: {
+        get(name: string) {
+          return cookieStore.get(name)?.value;
+        },
+      },
+    },
+  );
+
+  const newQueryId = NewQueryId();
+
+  const { data: uniqueDisplayId, error: rpcError } = await supabase.rpc(
+    "generate_unique_display_id",
+    { p_display_id: newQueryId },
+  );
+
+  if (rpcError) {
+    throw new Error(`Failed to reserve display ID: ${rpcError.message}`);
+  }
+
+  const { data: queryRow, error: insertError } = await supabase
+    .from("queries")
+    .insert({
+      display_id: uniqueDisplayId,
+      status: "QUEUED",
+      type: queryType,
+      params: json,
+    })
+    .select()
+    .returns<QueryRow[]>()
+    .single();
+
+  if (insertError) {
+    throw new Error(`Failed to create query: ${insertError.message}`);
+  }
+
+  const query: Query = processQueryData(queryRow);
+  return query;
+}
diff --git a/server/data/supabaseTypes.ts b/server/data/supabaseTypes.ts
new file mode 100644
index 0000000..45f8d3f
--- /dev/null
+++ b/server/data/supabaseTypes.ts
@@ -0,0 +1,492 @@
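+// NOTE: this file mirrors the generated Supabase types (presumably produced
+// with `supabase gen types typescript --local`); if the schema changes,
+// regenerate rather than editing by hand.
+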
+export type Json =
+  | string
+  | number
+  | boolean
+  | null
+  | { [key: string]: Json | undefined }
+  | Json[]
+
+export type Database = {
+  graphql_public: {
+    Tables: {
+      [_ in never]: never
+    }
+    Views: {
+      [_ in never]: never
+    }
+    Functions: {
+      graphql: {
+        Args: {
+          operationName?: string
+          query?: string
+          variables?: Json
+          extensions?: Json
+        }
+        Returns: Json
+      }
+    }
+    Enums: {
+      [_ in never]: never
+    }
+    CompositeTypes: {
+      [_ in never]: never
+    }
+  }
+  public: {
+    Tables: {
+      queries: {
+        Row: {
+          created_at: string
+          display_id: string
+          ended_at: string | null
+          params: Json
+          started_at: string | null
+          status: Database["public"]["Enums"]["status"]
+          type: Database["public"]["Enums"]["query_type"]
+          uuid: string
+        }
+        Insert: {
+          created_at?: string
+          display_id: string
+          ended_at?: string | null
+          params?: Json
+          started_at?: string | null
+          status: Database["public"]["Enums"]["status"]
+          type: Database["public"]["Enums"]["query_type"]
+          uuid?: string
+        }
+        Update: {
+          created_at?: string
+          display_id?: string
+          ended_at?: string | null
+          params?: Json
+          started_at?: string | null
+          status?: Database["public"]["Enums"]["status"]
+          type?: Database["public"]["Enums"]["query_type"]
+          uuid?: string
+        }
+        Relationships: []
+      }
+    }
+    Views: {
+      [_ in never]: never
+    }
+    Functions: {
+      generate_unique_display_id: {
+        Args: {
+          p_display_id: string
+        }
+        Returns: string
+      }
+    }
+    Enums: {
+      query_type: "IPA" | "DEMO_LOGGER"
+      status:
+        | "QUEUED"
+        | "STARTING"
+        | "COMPILING"
+        | "WAITING_TO_START"
+        | "IN_PROGRESS"
+        | "COMPLETE"
+        | "KILLED"
+        | "NOT_FOUND"
+        | "CRASHED"
+        | "UNKNOWN"
+    }
+    CompositeTypes: {
+      [_ in never]: never
+    }
+  }
+  storage: {
+    Tables: {
+      buckets: {
+        Row: {
+          allowed_mime_types: string[] | null
+          avif_autodetection: boolean | null
+          created_at: string | null
+          file_size_limit: number | null
+          id: string
+          name: string
+          owner: string | null
+          owner_id: string | null
+          public: boolean | null
+          updated_at: string | null
+        }
+        Insert: {
+          allowed_mime_types?: string[] | null
+          avif_autodetection?: boolean | null
+          created_at?: string | null
+          file_size_limit?: number | null
+          id: string
+          name: string
+          owner?: string | null
+          owner_id?: string | null
+          public?: boolean | null
+          updated_at?: string | null
+        }
+        Update: {
+          allowed_mime_types?: string[] | null
+          avif_autodetection?: boolean | null
+          created_at?: string | null
+          file_size_limit?: number | null
+          id?: string
+          name?: string
+          owner?: string | null
+          owner_id?: string | null
+          public?: boolean | null
+          updated_at?: string | null
+        }
+        Relationships: []
+      }
+      migrations: {
+        Row: {
+          executed_at: string | null
+          hash: string
+          id: number
+          name: string
+        }
+        Insert: {
+          executed_at?: string | null
+          hash: string
+          id: number
+          name: string
+        }
+        Update: {
+          executed_at?: string | null
+          hash?: string
+          id?: number
+          name?: string
+        }
+        Relationships: []
+      }
+      objects: {
+        Row: {
+          bucket_id: string | null
+          created_at: string | null
+          id: string
+          last_accessed_at: string | null
+          metadata: Json | null
+          name: string | null
+          owner: string | null
+          owner_id: string | null
+          path_tokens: string[] | null
+          updated_at: string | null
+          version: string | null
+        }
+        Insert: {
+          bucket_id?: string | null
+          created_at?: string | null
+          id?: string
+          last_accessed_at?: string | null
+          metadata?: Json | null
+          name?: string | null
+          owner?: string | null
+          owner_id?: string | null
+          path_tokens?: string[] | null
+          updated_at?: string | null
+          version?: string | null
+        }
+        Update: {
+          bucket_id?: string | null
+          created_at?: string | null
+          id?: string
+          last_accessed_at?: string | null
+          metadata?: Json | null
+          name?: string | null
+          owner?: string | null
+          owner_id?: string | null
+          path_tokens?: string[] | null
+          updated_at?: string | null
+          version?: string | null
+        }
+        Relationships: [
+          {
+            foreignKeyName: "objects_bucketId_fkey"
+            columns: ["bucket_id"]
+            isOneToOne: false
+            referencedRelation: "buckets"
+            referencedColumns: ["id"]
+          },
+        ]
+      }
+      s3_multipart_uploads: {
+        Row: {
+          bucket_id: string
+          created_at: string
+          id: string
+          in_progress_size: number
+          key: string
+          owner_id: string | null
+          upload_signature: string
+          version: string
+        }
+        Insert: {
+          bucket_id: string
+          created_at?: string
+          id: string
+          in_progress_size?: number
+          key: string
+          owner_id?: string | null
+          upload_signature: string
+          version: string
+        }
+        Update: {
+          bucket_id?: string
+          created_at?: string
+          id?: string
+          in_progress_size?: number
+          key?: string
+          owner_id?: string | null
+          upload_signature?: string
+          version?: string
+        }
+        Relationships: [
+          {
+            foreignKeyName: "s3_multipart_uploads_bucket_id_fkey"
+            columns: ["bucket_id"]
+            isOneToOne: false
+            referencedRelation: "buckets"
+            referencedColumns: ["id"]
+          },
+        ]
+      }
+      s3_multipart_uploads_parts: {
+        Row: {
+          bucket_id: string
+          created_at: string
+          etag: string
+          id: string
+          key: string
+          owner_id: string | null
+          part_number: number
+          size: number
+          upload_id: string
+          version: string
+        }
+        Insert: {
+          bucket_id: string
+          created_at?: string
+          etag: string
+          id?: string
+          key: string
+          owner_id?: string | null
+          part_number: number
+          size?: number
+          upload_id: string
+          version: string
+        }
+        Update: {
+          bucket_id?: string
+          created_at?: string
+          etag?: string
+          id?: string
+          key?: string
+          owner_id?: string | null
+          part_number?: number
+          size?: number
+          upload_id?: string
+          version?: string
+        }
+        Relationships: [
+          {
+            foreignKeyName: "s3_multipart_uploads_parts_bucket_id_fkey"
+            columns: ["bucket_id"]
+            isOneToOne: false
+            referencedRelation: "buckets"
+            referencedColumns: ["id"]
+          },
+          {
+            foreignKeyName: "s3_multipart_uploads_parts_upload_id_fkey"
+            columns: ["upload_id"]
+            isOneToOne: false
+            referencedRelation: "s3_multipart_uploads"
+            referencedColumns: ["id"]
+          },
+        ]
+      }
+    }
+    Views: {
+      [_ in never]: never
+    }
+    Functions: {
+      can_insert_object: {
+        Args: {
+          bucketid: string
+          name: string
+          owner: string
+          metadata: Json
+        }
+        Returns: undefined
+      }
+      extension: {
+        Args: {
+          name: string
+        }
+        Returns: string
+      }
+      filename: {
+        Args: {
+          name: string
+        }
+        Returns: string
+      }
+      foldername: {
+        Args: {
+          name: string
+        }
+        Returns: string[]
+      }
+      get_size_by_bucket: {
+        Args: Record<PropertyKey, never>
+        Returns: {
+          size: number
+          bucket_id: string
+        }[]
+      }
+      list_multipart_uploads_with_delimiter: {
+        Args: {
+          bucket_id: string
+          prefix_param: string
+          delimiter_param: string
+          max_keys?: number
+          next_key_token?: string
+          next_upload_token?: string
+        }
+        Returns: {
+          key: string
+          id: string
+          created_at: string
+        }[]
+      }
+      list_objects_with_delimiter: {
+        Args: {
+          bucket_id: string
+          prefix_param: string
+          delimiter_param: string
+          max_keys?: number
+          start_after?: string
+          next_token?: string
+        }
+        Returns: {
+          name: string
+          id: string
+          metadata: Json
+          updated_at: string
+        }[]
+      }
+      search: {
+        Args: {
+          prefix: string
+          bucketname: string
+          limits?: number
+          levels?: number
+          offsets?: number
+          search?: string
+          sortcolumn?: string
+          sortorder?: string
+        }
+        Returns: {
+          name: string
+          id: string
+          updated_at: string
+          created_at: string
+          last_accessed_at: string
+          metadata: Json
+        }[]
+      }
+    }
+    Enums: {
+      [_ in never]: never
+    }
+    CompositeTypes: {
+      [_ in never]: never
+    }
+  }
+}
+
+type PublicSchema = Database[Extract<keyof Database, "public">]
+
+export type Tables<
+  PublicTableNameOrOptions extends
+    | keyof (PublicSchema["Tables"] & PublicSchema["Views"])
+    | { schema: keyof Database },
+  TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
+    ? keyof (Database[PublicTableNameOrOptions["schema"]]["Tables"] &
+        Database[PublicTableNameOrOptions["schema"]]["Views"])
+    : never = never,
+> = PublicTableNameOrOptions extends { schema: keyof Database }
+  ? (Database[PublicTableNameOrOptions["schema"]]["Tables"] &
+      Database[PublicTableNameOrOptions["schema"]]["Views"])[TableName] extends {
+      Row: infer R
+    }
+    ? R
+    : never
+  : PublicTableNameOrOptions extends keyof (PublicSchema["Tables"] &
+        PublicSchema["Views"])
+    ? (PublicSchema["Tables"] &
+        PublicSchema["Views"])[PublicTableNameOrOptions] extends {
+        Row: infer R
+      }
+      ? R
+      : never
+    : never
+
+export type TablesInsert<
+  PublicTableNameOrOptions extends
+    | keyof PublicSchema["Tables"]
+    | { schema: keyof Database },
+  TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
+    ? keyof Database[PublicTableNameOrOptions["schema"]]["Tables"]
+    : never = never,
+> = PublicTableNameOrOptions extends { schema: keyof Database }
+  ? Database[PublicTableNameOrOptions["schema"]]["Tables"][TableName] extends {
+      Insert: infer I
+    }
+    ? I
+    : never
+  : PublicTableNameOrOptions extends keyof PublicSchema["Tables"]
+    ? PublicSchema["Tables"][PublicTableNameOrOptions] extends {
+        Insert: infer I
+      }
+      ? I
+      : never
+    : never
+
+export type TablesUpdate<
+  PublicTableNameOrOptions extends
+    | keyof PublicSchema["Tables"]
+    | { schema: keyof Database },
+  TableName extends PublicTableNameOrOptions extends { schema: keyof Database }
+    ? keyof Database[PublicTableNameOrOptions["schema"]]["Tables"]
+    : never = never,
+> = PublicTableNameOrOptions extends { schema: keyof Database }
+  ? Database[PublicTableNameOrOptions["schema"]]["Tables"][TableName] extends {
+      Update: infer U
+    }
+    ? U
+    : never
+  : PublicTableNameOrOptions extends keyof PublicSchema["Tables"]
+    ? PublicSchema["Tables"][PublicTableNameOrOptions] extends {
+        Update: infer U
+      }
+      ? U
+      : never
+    : never
+
+export type Enums<
+  PublicEnumNameOrOptions extends
+    | keyof PublicSchema["Enums"]
+    | { schema: keyof Database },
+  EnumName extends PublicEnumNameOrOptions extends { schema: keyof Database }
+    ? keyof Database[PublicEnumNameOrOptions["schema"]]["Enums"]
+    : never = never,
+> = PublicEnumNameOrOptions extends { schema: keyof Database }
+  ? Database[PublicEnumNameOrOptions["schema"]]["Enums"][EnumName]
+  : PublicEnumNameOrOptions extends keyof PublicSchema["Enums"]
+    ? PublicSchema["Enums"][PublicEnumNameOrOptions]
+    : never
PublicSchema["Enums"][PublicEnumNameOrOptions] + : never + diff --git a/server/supabase/migrations/20240607225621_add_query_row.sql b/server/supabase/migrations/20240607225621_add_query_row.sql new file mode 100644 index 0000000..ff91951 --- /dev/null +++ b/server/supabase/migrations/20240607225621_add_query_row.sql @@ -0,0 +1,54 @@ +create type status as enum ('QUEUED', 'STARTING', 'COMPILING', 'WAITING_TO_START', 'IN_PROGRESS', 'COMPLETE', 'KILLED', 'NOT_FOUND', 'CRASHED', 'UNKNOWN'); + +create type query_type as enum ('IPA', 'DEMO_LOGGER'); + +create table +queries ( +uuid uuid default gen_random_uuid() primary key, +display_id varchar(255) unique not null, +type query_type not null, +status status not null, +params jsonb not null default '{}'::jsonb, +created_at timestamp default current_timestamp not null, +started_at timestamp, +ended_at timestamp +); + +create index idx_display_id on queries (display_id); + +alter table queries enable row level security; + +create policy "Queries are visible to authenticated users" +on queries for select +to authenticated +using ( true ); + +create policy "Queries are only created by authenticated users" +on queries for insert +to authenticated +with check ( true ); + +create policy "Queries are only updated by authenticated users" +on queries for update +to authenticated +using ( true ) +with check ( true ); + +create or replace function generate_unique_display_id(p_display_id varchar) returns varchar as $$ +declare + new_display_id varchar; + suffix varchar; +begin + new_display_id := p_display_id; + suffix := ''; -- initialize the suffix as an empty string + + -- check if the initial short name exists + while exists (select 1 from queries where display_id = new_display_id) loop + -- if exists, append one digit at a time + suffix := case when suffix = '' then '-' else suffix end || floor(random() * 10)::text; + new_display_id := p_display_id || suffix; + end loop; + + return new_display_id; +end; +$$ language plpgsql;