#!/bin/bash
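
# Update script: rebuild llama.cpp and koboldcpp from their Git checkouts
# and install the resulting binaries into BIN_PATH.

# Write a default config file on first run; edit it to change the paths.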
if [ ! -f "$HOME/.config/llama/llama-update.conf" ]; then
    mkdir -p "$HOME/.config/llama"
    cat <<EOF > "$HOME/.config/llama/llama-update.conf"
LLAMA_CPP_GIT=$HOME/Work/llama.cpp
KOBOLDCPP_GIT=$HOME/Work/koboldcpp
BIN_PATH=$HOME/.local/bin
EOF
fi
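
# Load the configured paths.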
source "$HOME"/.config/llama/llama-update.conf |
|
|
|
|
|
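
# Clone llama.cpp on first run, otherwise pull the latest changes.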
if [ ! -d "$LLAMA_CPP_GIT" ]; then |
|
git clone https://github.com/ggerganov/llama.cpp "$LLAMA_CPP_GIT" |
|
else |
|
cd "$LLAMA_CPP_GIT" || exit |
|
git pull |
|
fi |
|
|
|
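
# Do a clean CMake release build of llama.cpp.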
cd "$LLAMA_CPP_GIT" || exit |
|
|
|
rm -rf build |
|
mkdir build |
|
cd build || exit |
|
cmake .. |
|
cmake --build . --config Release |
|
|
|
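
# Install the binaries into BIN_PATH, renaming them on the way; the Metal
# shader file is copied alongside them.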
mkdir -p "$BIN_PATH" |
|
install -c -v -m 755 bin/main "$BIN_PATH/llama" |
|
install -c -v bin/ggml-metal.metal "$BIN_PATH" |
|
install -c -v -m 755 bin/llava-cli "$BIN_PATH"/llava |
|
install -c -v -m 755 bin/finetune "$BIN_PATH/llama-finetune" |
|
install -c -v -m 755 bin/speculative "$BIN_PATH/llama-speculative" |
|
install -c -v -m 755 bin/server "$BIN_PATH/llama-server" |
|
|
|
|
|
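
# Clone koboldcpp on first run, otherwise pull the latest changes.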
if [ ! -d "$KOBOLDCPP_GIT" ]; then |
|
git clone https://github.com/LostRuins/koboldcpp "$KOBOLDCPP_GIT" |
|
else |
|
cd "$KOBOLDCPP_GIT" || exit |
|
git pull |
|
fi |
|
|
|
|
|
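
# Rebuild koboldcpp with the Metal backend enabled.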
cd "$KOBOLDCPP_GIT" || exit |
|
make clean |
|
make LLAMA_METAL=1 |
|
|
|
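
# Finally, clear cached files; llama-clear-cache.sh is assumed to be a
# companion script available on PATH.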
echo "Clear cache" |
|
llama-clear-cache.sh |
|
|