Hf spaces compatibility (#124)
rsamf authored Dec 19, 2024
1 parent 09c76af commit bfd884a
Showing 25 changed files with 1,037 additions and 837 deletions.
63 changes: 56 additions & 7 deletions .github/workflows/build.yml
@@ -2,16 +2,26 @@ name: Docker Build

on:
  push:
    branches: ["main"]
    paths-ignore:
      - 'docs/**'
      - 'tests/**'
    tags:
      - "v*"
  pull_request:

jobs:
  docker:
  base:
    runs-on:
      group: larger-runners
    steps:
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            rsamf/graphbook
          tags: |
            type=ref,event=tag
            type=semver,pattern={{version}}
            type=sha
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

@@ -24,7 +34,46 @@ jobs:
      - name: Build and push
        uses: docker/build-push-action@v6
        with:
          push: true
          tags: rsamf/graphbook:latest
          push: ${{ github.event_name == 'push' && contains(github.ref, 'refs/tags') }}
          file: ./docker/Dockerfile
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=registry,ref=rsamf/graphbook:latest
          cache-to: type=inline

  hfspace:
    needs: base
    runs-on:
      group: larger-runners
    steps:
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          flavor: |
            suffix=-space
          images: |
            rsamf/graphbook
          tags: |
            type=ref,event=tag
            type=semver,pattern={{version}}
            type=sha
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build and push
        uses: docker/build-push-action@v6
        with:
          push: ${{ github.event_name == 'push' && contains(github.ref, 'refs/tags') }}
          file: ./docker/Dockerfile.hfspace
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=registry,ref=rsamf/graphbook:latest-space
          cache-to: type=inline
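
Both jobs gate `push:` on the same expression, so images only land on Docker Hub when a version tag is pushed; pull requests and branch pushes still exercise the Docker build. A minimal Python sketch of that gating logic (an illustration of the expression above, not code from this commit):

```python
# Illustration only: mirrors the workflow's push gate
#   ${{ github.event_name == 'push' && contains(github.ref, 'refs/tags') }}
def should_push(event_name: str, ref: str) -> bool:
    return event_name == "push" and "refs/tags" in ref

assert should_push("push", "refs/tags/v0.10.0")                # tag push -> push images
assert not should_push("push", "refs/heads/main")              # branch push -> build only
assert not should_push("pull_request", "refs/pull/124/merge")  # PR -> build only
```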
1 change: 1 addition & 0 deletions README.md
@@ -133,3 +133,4 @@ You can use any other virtual environment solution, but it is highly advised to
1. `cd web`
1. `deno install`
1. `deno run dev`
1. In your browser, navigate to localhost:5173, and in the settings, change your **Graph Server Host** to `localhost:8005`.
File renamed without changes.
5 changes: 5 additions & 0 deletions docker/Dockerfile.hfspace
@@ -0,0 +1,5 @@
FROM rsamf/graphbook:latest

RUN chown -R 1000:1000 .

CMD ["python", "-m", "graphbook.main", "--isolate-users"]
201 changes: 201 additions & 0 deletions graphbook/clients.py
@@ -0,0 +1,201 @@
from typing import Dict
import uuid
from aiohttp.web import WebSocketResponse
from .processing.web_processor import WebInstanceProcessor
from .utils import ProcessorStateRequest
from .nodes import NodeHub
from .viewer import ViewManager
import tempfile
import os.path as osp
from pathlib import Path
import multiprocessing as mp
import os
import asyncio
import shutil

DEFAULT_CLIENT_OPTIONS = {"SEND_EVERY": 0.5}


class Client:
    def __init__(
        self,
        sid: str,
        ws: WebSocketResponse,
        processor: WebInstanceProcessor,
        node_hub: NodeHub,
        view_manager: ViewManager,
        setup_paths: dict,
        options: dict = DEFAULT_CLIENT_OPTIONS,
    ):
        self.sid = sid
        self.ws = ws
        self.processor = processor
        self.node_hub = node_hub
        self.view_manager = view_manager
        self.root_path = Path(setup_paths["workflow_dir"])
        self.docs_path = Path(setup_paths["docs_path"])
        self.custom_nodes_path = Path(setup_paths["custom_nodes_path"])
        self.options = options
        self.curr_task = None

    def get_root_path(self) -> Path:
        return self.root_path

    def get_docs_path(self) -> Path:
        return self.docs_path

    def get_custom_nodes_path(self) -> Path:
        return self.custom_nodes_path

    def nodes(self):
        return self.node_hub.get_exported_nodes()

    def step_doc(self, name):
        return self.node_hub.get_step_docstring(name)

    def resource_doc(self, name):
        return self.node_hub.get_resource_docstring(name)

    def exec(self, req: dict):
        self.processor.exec(req)

    def poll(self, cmd: ProcessorStateRequest, data: dict = None):
        res = self.processor.poll_client(
            cmd,
            data,
        )
        return res

    async def _loop(self):
        # Push the latest view data and node states to this client's websocket
        # every SEND_EVERY seconds.
        while True:
            await asyncio.sleep(self.options["SEND_EVERY"])
            current_view_data = self.view_manager.get_current_view_data()
            current_states = self.view_manager.get_current_states()
            all_data = [*current_view_data, *current_states]
            await asyncio.gather(*[self.ws.send_json(data) for data in all_data])

    def start(self):
        loop = asyncio.get_event_loop()
        self.curr_task = loop.create_task(self._loop())

    async def close(self):
        if self.curr_task is not None:
            self.curr_task.cancel()
        await self.ws.close()
        self.processor.close()
        self.node_hub.stop()


class ClientPool:
    def __init__(
        self,
        web_processor_args: dict,
        setup_paths: dict,
        plugins: tuple,
        isolate_users: bool,
        no_sample: bool,
        close_event: mp.Event,
    ):
        self.clients: Dict[str, Client] = {}
        self.tmpdirs: Dict[str, str] = {}
        self.web_processor_args = web_processor_args
        self.setup_paths = setup_paths
        self.plugins = plugins
        self.shared_execution = not isolate_users
        self.no_sample = no_sample
        self.close_event = close_event
        if self.shared_execution:
            # Without --isolate-users, every client shares one processor,
            # node hub, and view manager.
            self.shared_resources = self._create_resources(
                web_processor_args, setup_paths
            )

    def _create_resources(self, web_processor_args: dict, setup_paths: dict):
        view_queue = mp.Queue()
        processor_args = {
            **web_processor_args,
            "custom_nodes_path": setup_paths["custom_nodes_path"],
            "view_manager_queue": view_queue,
        }
        self._create_dirs(**setup_paths, no_sample=self.no_sample)
        processor = WebInstanceProcessor(**processor_args)
        view_manager = ViewManager(view_queue, self.close_event, processor)
        node_hub = NodeHub(setup_paths["custom_nodes_path"], self.plugins, view_manager)
        processor.start()
        view_manager.start()
        node_hub.start()
        return {
            "processor": processor,
            "node_hub": node_hub,
            "view_manager": view_manager,
        }

    def _create_dirs(
        self, workflow_dir: str, custom_nodes_path: str, docs_path: str, no_sample: bool
    ):
        def create_sample_workflow():
            import shutil

            project_path = Path(__file__).parent
            assets_dir = project_path.joinpath("sample_assets")
            n = "SampleWorkflow.json"
            shutil.copyfile(assets_dir.joinpath(n), Path(workflow_dir).joinpath(n))
            n = "SampleWorkflow.md"
            shutil.copyfile(assets_dir.joinpath(n), Path(docs_path).joinpath(n))
            n = "sample_nodes.py"
            shutil.copyfile(assets_dir.joinpath(n), Path(custom_nodes_path).joinpath(n))

        should_create_sample = False
        if not osp.exists(workflow_dir):
            should_create_sample = not no_sample
            os.mkdir(workflow_dir)
        if not osp.exists(custom_nodes_path):
            os.mkdir(custom_nodes_path)
        if not osp.exists(docs_path):
            os.mkdir(docs_path)

        if should_create_sample:
            create_sample_workflow()

    def add_client(self, ws: WebSocketResponse) -> Client:
        sid = uuid.uuid4().hex
        setup_paths = {**self.setup_paths}
        if not self.shared_execution:
            # Isolated mode: each client gets its own temp workspace and its
            # own processor, node hub, and view manager.
            root_path = Path(tempfile.mkdtemp())
            self.tmpdirs[sid] = root_path
            setup_paths = {
                key: root_path.joinpath(path) for key, path in setup_paths.items()
            }
            web_processor_args = {
                **self.web_processor_args,
                "custom_nodes_path": setup_paths["custom_nodes_path"],
            }
            resources = self._create_resources(web_processor_args, setup_paths)
        else:
            resources = self.shared_resources

        client = Client(sid, ws, **resources, setup_paths=setup_paths)
        client.start()
        self.clients[sid] = client
        asyncio.create_task(ws.send_json({"type": "sid", "data": sid}))
        print(f"{sid}: {client.get_root_path()}")
        return client

    async def remove_client(self, client: Client):
        sid = client.sid
        if sid in self.clients:
            await client.close()
            del self.clients[sid]
        if sid in self.tmpdirs:
            shutil.rmtree(self.tmpdirs[sid])
            del self.tmpdirs[sid]

    async def remove_all(self):
        for sid in self.clients:
            await self.clients[sid].close()
        for sid in self.tmpdirs:
            os.rmdir(self.tmpdirs[sid])
        self.clients = {}
        self.tmpdirs = {}

    def get(self, sid: str) -> Client | None:
        return self.clients.get(sid, None)
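
The aiohttp server that consumes `ClientPool` is not part of this diff. As a rough sketch of how `add_client` / `remove_client` are meant to be used from a websocket handler (the handler and message routing below are assumptions, not graphbook's actual server code):

```python
# Hypothetical wiring sketch; graphbook's real web server is not shown in this commit.
from aiohttp import WSMsgType, web


def make_ws_handler(pool):  # pool: a ClientPool constructed elsewhere
    async def websocket_handler(request: web.Request) -> web.WebSocketResponse:
        ws = web.WebSocketResponse()
        await ws.prepare(request)

        client = pool.add_client(ws)  # assigns a sid and starts the client's send loop
        try:
            async for msg in ws:
                if msg.type == WSMsgType.TEXT:
                    client.exec(msg.json())  # real routing is richer than this
        finally:
            await pool.remove_client(client)  # cancels the loop and cleans up tempdirs
        return ws

    return websocket_handler
```

With `--isolate-users` (as in Dockerfile.hfspace), each such connection would get its own temporary workspace and processor; in shared mode, all connections reuse the pool's shared resources.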