From da7ee6f47b2d5db93ccb91b151bcd5b86d8ec35d Mon Sep 17 00:00:00 2001
From: Balaji Alwar
Date: Fri, 28 Jun 2024 16:25:25 -0700
Subject: [PATCH 1/6] Fixing the right model

---
 deployments/dev/images/default/postBuild | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)
 create mode 100644 deployments/dev/images/default/postBuild

diff --git a/deployments/dev/images/default/postBuild b/deployments/dev/images/default/postBuild
new file mode 100644
index 000000000..170e9ac5a
--- /dev/null
+++ b/deployments/dev/images/default/postBuild
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+# Change to a directory where you want to download and install TinyLLAMA
+cd /opt
+
+# Clone the ollama repository (replace with actual repository URL)
+git clone https://github.com/ollama/ollama.git
+
+# Change into the ollama directory
+cd ollama
+
+# Install necessary dependencies
+pip install -r requirements.txt
+
+# Install TinyLLAMA model (adjust as per actual model installation instructions)
+./ollama-linux-amd64 run phi3:mini
+./ollama-linux-amd64 run tinyllama
+

From 6eca0c95eb601d98516d0752d5177fdc72aed2c7 Mon Sep 17 00:00:00 2001
From: Balaji Alwar
Date: Fri, 28 Jun 2024 16:30:35 -0700
Subject: [PATCH 2/6] removing pip install

---
 deployments/dev/images/default/postBuild | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/deployments/dev/images/default/postBuild b/deployments/dev/images/default/postBuild
index 170e9ac5a..4ec764825 100644
--- a/deployments/dev/images/default/postBuild
+++ b/deployments/dev/images/default/postBuild
@@ -9,9 +9,6 @@ git clone https://github.com/ollama/ollama.git
 # Change into the ollama directory
 cd ollama
 
-# Install necessary dependencies
-pip install -r requirements.txt
-
 # Install TinyLLAMA model (adjust as per actual model installation instructions)
 ./ollama-linux-amd64 run phi3:mini
 ./ollama-linux-amd64 run tinyllama

From 4a6b2220c32289a61840e47e7131b21d03ed17ba Mon Sep 17 00:00:00 2001
From: Balaji Alwar
Date: Fri, 28 Jun 2024 18:21:34 -0700
Subject: [PATCH 3/6] Change installation to docker image based

---
 deployments/dev/images/default/postBuild | 17 ++++++-----------
 1 file changed, 6 insertions(+), 11 deletions(-)

diff --git a/deployments/dev/images/default/postBuild b/deployments/dev/images/default/postBuild
index 4ec764825..c974fa225 100644
--- a/deployments/dev/images/default/postBuild
+++ b/deployments/dev/images/default/postBuild
@@ -1,15 +1,10 @@
 #!/bin/bash
 
-# Change to a directory where you want to download and install TinyLLAMA
-cd /opt
+# Pull and run the Ollama Docker images - https://hub.docker.com/r/ollama/ollama
+docker pull ollama/phi3
+docker pull ollama/tinyllama
 
-# Clone the ollama repository (replace with actual repository URL)
-git clone https://github.com/ollama/ollama.git
-
-# Change into the ollama directory
-cd ollama
-
-# Install TinyLLAMA model (adjust as per actual model installation instructions)
-./ollama-linux-amd64 run phi3:mini
-./ollama-linux-amd64 run tinyllama
+# Start the Ollama containers
+docker run -d --name phi3_container ollama/phi3
+docker run -d --name tinyllama_container ollama/tinyllama
 

From 7ba3aefdeee1f87f022428a6de68b1d230a9dd3e Mon Sep 17 00:00:00 2001
From: Balaji Alwar
Date: Tue, 2 Jul 2024 15:31:28 -0700
Subject: [PATCH 4/6] Adding ollama models as part of extracontainer stanza

---
 deployments/dev/config/common.yaml       | 13 +++++++++++++
 deployments/dev/images/default/postBuild | 10 ----------
 2 files changed, 13 insertions(+), 10 deletions(-)
 delete mode 100644 deployments/dev/images/default/postBuild

diff --git a/deployments/dev/config/common.yaml b/deployments/dev/config/common.yaml
index f0c49b1de..dcfbbe45c 100644
--- a/deployments/dev/config/common.yaml
+++ b/deployments/dev/config/common.yaml
@@ -63,6 +63,19 @@ jupyterhub:
       # Unset NotebookApp from hub/values. Necessary for recent lab versions.
       JUPYTERHUB_SINGLEUSER_APP: "jupyter_server.serverapp.ServerApp"
     defaultUrl: /lab
+    # Run the ollama model as part of the user pod
+    # https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.extra_containers
+    extraContainers:
+      - name: ollama-phi3-container
+        image: phi3:latest
+        command: ["ollama", "run", "phi3"]
+        ports:
+          - containerPort: 5000
+      - name: ollama-tinyllama-container
+        image: tinyllama:latest
+        command: ["ollama", "run", "tinyllama"]
+        ports:
+          - containerPort: 5001
     profileList:
       - display_name: "Dockerfile image"
         description: "This is the original dev image."
diff --git a/deployments/dev/images/default/postBuild b/deployments/dev/images/default/postBuild
deleted file mode 100644
index c974fa225..000000000
--- a/deployments/dev/images/default/postBuild
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/bash
-
-# Pull and run the Ollama Docker images - https://hub.docker.com/r/ollama/ollama
-docker pull ollama/phi3
-docker pull ollama/tinyllama
-
-# Start the Ollama containers
-docker run -d --name phi3_container ollama/phi3
-docker run -d --name tinyllama_container ollama/tinyllama
-

From 851713ac1b588f149a7503529ddf81185187c4db Mon Sep 17 00:00:00 2001
From: Balaji Alwar
Date: Tue, 2 Jul 2024 17:10:01 -0700
Subject: [PATCH 5/6] Reverting to postbuild based approach

---
 deployments/dev/config/common.yaml       | 13 -------------
 deployments/dev/images/default/postBuild | 13 +++++++++++++
 2 files changed, 13 insertions(+), 13 deletions(-)
 create mode 100644 deployments/dev/images/default/postBuild

diff --git a/deployments/dev/config/common.yaml b/deployments/dev/config/common.yaml
index dcfbbe45c..f0c49b1de 100644
--- a/deployments/dev/config/common.yaml
+++ b/deployments/dev/config/common.yaml
@@ -63,19 +63,6 @@ jupyterhub:
       # Unset NotebookApp from hub/values. Necessary for recent lab versions.
       JUPYTERHUB_SINGLEUSER_APP: "jupyter_server.serverapp.ServerApp"
     defaultUrl: /lab
-    # Run the ollama model as part of the user pod
-    # https://jupyterhub-kubespawner.readthedocs.io/en/latest/spawner.html#kubespawner.KubeSpawner.extra_containers
-    extraContainers:
-      - name: ollama-phi3-container
-        image: phi3:latest
-        command: ["ollama", "run", "phi3"]
-        ports:
-          - containerPort: 5000
-      - name: ollama-tinyllama-container
-        image: tinyllama:latest
-        command: ["ollama", "run", "tinyllama"]
-        ports:
-          - containerPort: 5001
     profileList:
       - display_name: "Dockerfile image"
         description: "This is the original dev image."
diff --git a/deployments/dev/images/default/postBuild b/deployments/dev/images/default/postBuild
new file mode 100644
index 000000000..c0341c736
--- /dev/null
+++ b/deployments/dev/images/default/postBuild
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+# Download the Ollama executable
+wget https://github.com/ollama/ollama/releases/download/v0.1.48/ollama-linux-amd64 -O ollama
+
+# Make the Ollama executable runnable
+chmod +x ollama
+
+# Run the phi3:mini model
+./ollama run phi3:mini
+
+# Run the tinyllama model
+./ollama run tinyllama

From 24295db752f37009561fb355a459b1c2c3b6fd03 Mon Sep 17 00:00:00 2001
From: Balaji Alwar
Date: Wed, 3 Jul 2024 10:40:11 -0700
Subject: [PATCH 6/6] Add directory where data gets saved

---
 deployments/dev/images/default/postBuild | 18 ++++++++++++------
 1 file changed, 12 insertions(+), 6 deletions(-)

diff --git a/deployments/dev/images/default/postBuild b/deployments/dev/images/default/postBuild
index c0341c736..450043039 100644
--- a/deployments/dev/images/default/postBuild
+++ b/deployments/dev/images/default/postBuild
@@ -1,13 +1,19 @@
 #!/bin/bash
 
+# Define the directory for persistent storage
+OLLAMA_DATA_DIR="/persistent/ollama"
+
+# Create the directory if it doesn't exist
+mkdir -p $OLLAMA_DATA_DIR
+
 # Download the Ollama executable
-wget https://github.com/ollama/ollama/releases/download/v0.1.48/ollama-linux-amd64 -O ollama
+wget https://github.com/ollama/ollama/releases/download/v0.1.48/ollama-linux-amd64 -O /usr/local/bin/ollama
 
 # Make the Ollama executable runnable
-chmod +x ollama
+chmod +x /usr/local/bin/ollama
 
-# Run the phi3:mini model
-./ollama run phi3:mini
+# Run the phi3:mini model with the specified data directory
+/usr/local/bin/ollama run phi3:mini --data-dir $OLLAMA_DATA_DIR
 
-# Run the tinyllama model
-./ollama run tinyllama
+# Run the tinyllama model with the specified data directory
+/usr/local/bin/ollama run tinyllama --data-dir $OLLAMA_DATA_DIR
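
A note on the CLI usage in the final patch, with a hedged sketch: `ollama run` does not document a `--data-dir` flag; the model-store location is normally set through the `OLLAMA_MODELS` environment variable, and both `ollama pull` and `ollama run` expect an `ollama serve` process to be running. The sketch below expresses the same postBuild intent under those assumptions. The model names and the /persistent/ollama path come from the patches above, while the /usr/local/bin install location and the sleep-based startup wait are illustrative choices only.

    #!/bin/bash
    # Illustrative sketch only; not part of the patch series above.
    # Assumes the ollama-linux-amd64 release binary has been installed as /usr/local/bin/ollama.

    # Ollama reads its model-store location from OLLAMA_MODELS rather than a --data-dir flag.
    export OLLAMA_MODELS="/persistent/ollama"
    mkdir -p "$OLLAMA_MODELS"

    # `ollama pull` and `ollama run` talk to a local server, so start one in the background.
    /usr/local/bin/ollama serve &
    sleep 5  # crude wait for the server to come up

    # Download the models without opening an interactive session.
    /usr/local/bin/ollama pull phi3:mini
    /usr/local/bin/ollama pull tinyllama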