#!/usr/bin/env bash
# Build llama.cpp with the ROCm/HIP backend and run a DeepSeek-R1 distill model.
# Assumes a Debian/Ubuntu host with ROCm repos configured and an AMD GPU
# (gfx1034, i.e. RDNA2 RX 6x00-class — TODO confirm for your card).
set -euo pipefail

# Toolchain and HIP/rocBLAS build dependencies.
sudo apt -y install git wget hipcc libhipblas-dev librocblas-dev cmake build-essential

# GPU device nodes (/dev/kfd, /dev/dri) are group-restricted; join those groups.
# NOTE: group membership takes effect on next login (or via `newgrp`).
sudo usermod -aG video,render "$USER"

# Sanity check: list the agents ROCm can see.
rocminfo

git clone https://github.com/ggerganov/llama.cpp.git
cd llama.cpp

# Configure with the HIP backend, targeting gfx1034.
HIPCXX=clang++-17 HIP_PATH="$(hipconfig -R)" \
  cmake -S . -B build -DGGML_HIP=ON -DAMDGPU_TARGETS=gfx1034 -DCMAKE_BUILD_TYPE=Release
# Older llama.cpp revisions used the GGML_HIPBLAS flag instead:
# HIPCXX=clang++-17 cmake -H. -Bbuild -DGGML_HIPBLAS=ON -DCMAKE_BUILD_TYPE=Release

make -j"$(nproc)" -C build

# Fetch a 2-bit (IQ2_S) quantization of DeepSeek-R1-Distill-Qwen-32B.
wget https://huggingface.co/bartowski/DeepSeek-R1-Distill-Qwen-32B-GGUF/resolve/main/DeepSeek-R1-Distill-Qwen-32B-IQ2_S.gguf

# Interactive chat: offload 32 layers to the GPU, 2048-token context,
# unlimited generation length (-n -1).
build/bin/llama-cli -ngl 32 --color -c 2048 --temp 0.6 --repeat_penalty 1.1 -n -1 \
  -m DeepSeek-R1-Distill-Qwen-32B-IQ2_S.gguf

# To run in a container (pass the GPU device nodes through and keep group IDs):
# podman run -it --rm --group-add keep-groups --device=/dev/dri --device=/dev/kfd \
#   -v /home/ema:/home/ema --userns=keep-id llama.cpp