pruning upstreamed and unused python packages
jpetrucciani committed Jan 2, 2025
1 parent 9d9c7a8 commit 8c75de7
Showing 9 changed files with 1 addition and 1,327 deletions.
76 changes: 0 additions & 76 deletions mods/python/ai/bindings.nix
@@ -39,82 +39,6 @@ rec {

};

  llama-cpp-python =
    let
      osSpecific =
        if isM1 then with darwin.apple_sdk_11_0.frameworks; [ Accelerate MetalKit MetalPerformanceShaders MetalPerformanceShadersGraph ]
        else if isDarwin then with darwin.apple_sdk.frameworks; [ Accelerate CoreGraphics CoreVideo ]
        else [ ];
    in
    buildPythonPackage rec {
      pname = "llama-cpp-python";
      version = "0.3.4";
      pyproject = true;
      src = fetchFromGitHub {
        owner = "abetlen";
        repo = pname;
        rev = "refs/tags/v${version}";
        hash = "sha256-D8k+vdZPQGUTyAcq+IYFn52peABMg1ZgPuef90Ya9Kg=";
      };

      cuda = false;

      _CMAKE_ARGS = (optionals isM1 [ "-DLLAMA_METAL=on" ]) ++ (optionals cuda [ "-DLLAMA_CUBLAS=on" ]);
      CMAKE_ARGS = builtins.concatStringsSep " " _CMAKE_ARGS;
      FORCE_CMAKE = if (isM1 || cuda) then "1" else null;

      preConfigure = ''
        cp -r ${llama-cpp-pin}/. ./vendor/llama.cpp
        chmod -R +w ./vendor/llama.cpp
      '';
      preBuild = ''
        cd ..
        sed -E -i \
          -e '/"ninja/d' \
          -e '/"cmake/d' \
          -e 's/(requires =).*/\1 ["scikit-build-core[pyproject]>=0.8.2"]/g' \
          ./pyproject.toml
      '';
      buildInputs = osSpecific;

      nativeBuildInputs = (with prev.pkgs; [
        cmake
        ninja
      ]) ++ (with prev; [
        pythonRelaxDepsHook
        scikit-build-core
        pathspec
        pyproject-metadata
      ]) ++ (optionals cuda [ cudatoolkit ]);
      pythonRelaxDeps = [ "diskcache" ];
      propagatedBuildInputs = with prev; [
        diskcache
        jinja2
        numpy
        typing-extensions

        # server mode
        fastapi
        sse-starlette
        uvicorn
      ];

      pythonImportsCheck = [ "llama_cpp" ];

      passthru.cuda = llama-cpp-python.overridePythonAttrs (old: {
        CMAKE_ARGS = "-DLLAMA_CUBLAS=on";
        FORCE_CMAKE = 1;
        nativeBuildInputs = old.nativeBuildInputs ++ [ cudatoolkit ];
      });

      meta = {
        description = "A Python wrapper for llama.cpp";
        homepage = "https://github.com/abetlen/llama-cpp-python";
        license = licenses.mit;
        maintainers = with maintainers; [ jpetrucciani ];
      };
    };

  ggml-python =
    let
      osSpecific =
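Since llama-cpp-python has been upstreamed, downstream environments would presumably switch to the packaged build from nixpkgs rather than the local derivation removed above. A minimal consumer sketch, not part of this commit, assuming the pinned nixpkgs revision exposes the attribute python3Packages.llama-cpp-python:

# Hypothetical snippet (not part of this commit): build a Python environment
# that pulls llama-cpp-python from nixpkgs instead of the removed local
# derivation in mods/python/ai/bindings.nix.
let
  pkgs = import <nixpkgs> { };
in
pkgs.python3.withPackages (ps: with ps; [
  llama-cpp-python # assumed to be available upstream as python3Packages.llama-cpp-python
])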
113 changes: 0 additions & 113 deletions mods/python/ai/dataset.nix

This file was deleted.

74 changes: 0 additions & 74 deletions mods/python/ai/deployment.nix

This file was deleted.

49 changes: 0 additions & 49 deletions mods/python/ai/eval.nix

This file was deleted.

