Adjust settings for Ollama - try vulkan

There is also an ollama-rocm package to fall back on, should this fail.
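For reference, a minimal sketch of that ROCm fallback, assuming pkgs.ollama-rocm plugs into the same services.ollama module options used in the diff below; anything not shown in this commit is an assumption, not a tested config:

  # Sketch only: fall back to the ROCm build if pkgs.ollama-vulkan doesn't work out.
  services.ollama = {
    enable = true;
    package = pkgs.ollama-rocm;        # assumption: swap in the ROCm package variant
    environmentVariables = {
      HCC_AMDGPU_TARGET = "gfx1031";   # same GPU target as the existing config
    };
  };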
Chris (wolcen) Thompson 2025-12-12 12:55:40 -05:00
parent bafb941c87
commit ccfbdfcb6a

@@ -421,7 +421,8 @@ in
   services.ollama = {
     enable = true;
-    acceleration = "rocm"; # hipblaslt not compiling at present
+    #acceleration = "rocm"; # hipblaslt not compiling at present - build reports this no longer takes effect
+    package = pkgs.ollama-vulkan;
     environmentVariables = {
       HCC_AMDGPU_TARGET = "gfx1031"; # used to be necessary, but doesn't seem to anymore
       OLLAMA_LOAD_TIMEOUT = "2m"; # Reduce load timeout (from 5 min)...if it's that big, forget it.