Commit

Updated and fixed a bug where users could skip downloading the llava model
lunamidori5 committed Jun 20, 2024
1 parent 013ec46 commit b2824f6
Showing 6 changed files with 30 additions and 7 deletions.
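In outline, the commit moves the llava model fetch from container start-up into the image build: each Dockerfile now runs midori_ai_downloader while the image is built and stages the weights in /tempmodels, and the entrypoint script then only moves the staged files into place, so a user can no longer skip (or interrupt) the download on first run. As a rough combined sketch of the two halves shown in the diffs below (illustrative, not a verbatim copy of any single file):

# Build time (Dockerfile): stage the weights inside the image
RUN mkdir /tempmodels
RUN midori_ai_downloader llava-v1.5-13b-Q6_K.gguf && mv llava-v1.5-13b-Q6_K.gguf /tempmodels/llava-v1.5-13b-Q6_K.gguf
RUN midori_ai_downloader mmproj-model-f16.gguf && mv mmproj-model-f16.gguf /tempmodels/mmproj-model-f16.gguf

# Run time (entrypoint): move the staged copy into place instead of downloading
if [ ! -f ggml-model-q4_k.gguf ]; then
    mv /tempmodels/llava-v1.5-13b-Q6_K.gguf ggml-model-q4_k.gguf
fi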
@@ -32,6 +32,11 @@ RUN mv localai_entrypoint.sh /build/entrypoint_md_ai.sh
 RUN python3 -m pip install --no-cache-dir cryptography aiohttp tk
 RUN python3 -m pip install --no-cache-dir -r midori_program_requirments.txt
 RUN python3 -m pip cache purge
 
+RUN mkdir /tempmodels
+
+RUN midori_ai_downloader llava-v1.5-13b-Q6_K.gguf && mv llava-v1.5-13b-Q6_K.gguf /tempmodels/llava-v1.5-13b-Q6_K.gguf
+RUN midori_ai_downloader mmproj-model-f16.gguf && mv mmproj-model-f16.gguf /tempmodels/mmproj-model-f16.gguf
+
 RUN apt-get autoclean && apt-get clean
 
@@ -33,6 +33,11 @@ RUN python3 -m pip install --no-cache-dir cryptography aiohttp tk
 RUN python3 -m pip install --no-cache-dir -r midori_program_requirments.txt
 RUN python3 -m pip cache purge
 
+RUN mkdir /tempmodels
+
+RUN midori_ai_downloader llava-v1.5-13b-Q6_K.gguf && mv llava-v1.5-13b-Q6_K.gguf /tempmodels/llava-v1.5-13b-Q6_K.gguf
+RUN midori_ai_downloader mmproj-model-f16.gguf && mv mmproj-model-f16.gguf /tempmodels/mmproj-model-f16.gguf
+
 RUN apt-get autoclean && apt-get clean
 
 RUN echo "Placeholder: USER localai"
@@ -33,6 +33,11 @@ RUN python3 -m pip install --no-cache-dir cryptography aiohttp tk
 RUN python3 -m pip install --no-cache-dir -r midori_program_requirments.txt
 RUN python3 -m pip cache purge
 
+RUN mkdir /tempmodels
+
+RUN midori_ai_downloader llava-v1.5-13b-Q6_K.gguf && mv llava-v1.5-13b-Q6_K.gguf /tempmodels/llava-v1.5-13b-Q6_K.gguf
+RUN midori_ai_downloader mmproj-model-f16.gguf && mv mmproj-model-f16.gguf /tempmodels/mmproj-model-f16.gguf
+
 RUN apt-get autoclean && apt-get clean
 
 RUN echo "Placeholder: USER localai"
@@ -33,6 +33,11 @@ RUN python3 -m pip install --no-cache-dir cryptography aiohttp tk
 RUN python3 -m pip install --no-cache-dir -r midori_program_requirments.txt
 RUN python3 -m pip cache purge
 
+RUN mkdir /tempmodels
+
+RUN midori_ai_downloader llava-v1.5-13b-Q6_K.gguf && mv llava-v1.5-13b-Q6_K.gguf /tempmodels/llava-v1.5-13b-Q6_K.gguf
+RUN midori_ai_downloader mmproj-model-f16.gguf && mv mmproj-model-f16.gguf /tempmodels/mmproj-model-f16.gguf
+
 RUN apt-get autoclean && apt-get clean
 
 RUN echo "Placeholder: USER localai"
@@ -33,6 +33,11 @@ RUN python3 -m pip install --no-cache-dir cryptography aiohttp tk
 RUN python3 -m pip install --no-cache-dir -r midori_program_requirments.txt
 RUN python3 -m pip cache purge
 
+RUN mkdir /tempmodels
+
+RUN midori_ai_downloader llava-v1.5-13b-Q6_K.gguf && mv llava-v1.5-13b-Q6_K.gguf /tempmodels/llava-v1.5-13b-Q6_K.gguf
+RUN midori_ai_downloader mmproj-model-f16.gguf && mv mmproj-model-f16.gguf /tempmodels/mmproj-model-f16.gguf
+
 RUN apt-get autoclean && apt-get clean
 
 RUN echo "Placeholder: USER localai"
@@ -162,15 +162,13 @@ if [ ! -f bert-MiniLM-L6-v2q4_0.bin ]; then
 fi
 
 if [ ! -f ggml-model-q4_k.gguf ]; then
-#wget --no-check-certificate --no-cache --no-cookies https://huggingface.co/PsiPi/liuhaotian_llava-v1.5-13b-GGUF/resolve/main/llava-v1.5-13b-Q6_K.gguf
-#hf-downloader -un PsiPi -r liuhaotian_llava-v1.5-13b-GGUF -m llava-v1.5-13b-Q6_K.gguf
-midori_ai_downloader llava-v1.5-13b-Q6_K.gguf
-mv llava-v1.5-13b-Q6_K.gguf ggml-model-q4_k.gguf
+echo moving model from temp folder
+mv /tempmodels/llava-v1.5-13b-Q6_K.gguf ggml-model-q4_k.gguf
 fi
 if [ ! -f mmproj-model-f16.gguf ]; then
-#wget --no-check-certificate --no-cache --no-cookies https://huggingface.co/PsiPi/liuhaotian_llava-v1.5-13b-GGUF/resolve/main/mmproj-model-f16.gguf
-#hf-downloader -un PsiPi -r liuhaotian_llava-v1.5-13b-GGUF -m mmproj-model-f16.gguf
-midori_ai_downloader mmproj-model-f16.gguf
+echo moving model from temp folder
+mv /tempmodels/mmproj-model-f16.gguf mmproj-model-f16.gguf
+
 fi
 if [ ! -f chat-simple.tmpl ]; then
 wget --no-check-certificate --no-cache --no-cookies https://github.com/mudler/LocalAI/blob/b8240b4c1839089b9d06a3e2b1c629a294cff87e/examples/configurations/llava/chat-simple.tmpl
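Not part of this commit, but worth noting as a small defensive variant of the entrypoint logic: if the staged copy in /tempmodels were ever missing (for example, in an older image), the script could fall back to the previous download path rather than failing the mv. A hedged sketch, reusing only commands already present in the script:

# Sketch only (not in the commit): prefer the staged copy, fall back to downloading
if [ ! -f ggml-model-q4_k.gguf ]; then
    if [ -f /tempmodels/llava-v1.5-13b-Q6_K.gguf ]; then
        echo "moving model from temp folder"
        mv /tempmodels/llava-v1.5-13b-Q6_K.gguf ggml-model-q4_k.gguf
    else
        echo "staged model missing, downloading instead"
        midori_ai_downloader llava-v1.5-13b-Q6_K.gguf
        mv llava-v1.5-13b-Q6_K.gguf ggml-model-q4_k.gguf
    fi
fi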
