Adjust settings for Ollama - try vulkan
There is also an ollama-rocm, should this fail
parent bafb941c87
commit ccfbdfcb6a
1 changed file with 2 additions and 1 deletion
@@ -421,7 +421,8 @@ in
   services.ollama = {
     enable = true;
-    acceleration = "rocm"; # hipblaslt not compiling at present
+    #acceleration = "rocm"; # hipblaslt not compiling at present - build reports this no longer takes effect
+    package = pkgs.ollama-vulkan;
     environmentVariables = {
       HCC_AMDGPU_TARGET = "gfx1031"; # used to be necessary, but doesn't seem to anymore
       OLLAMA_LOAD_TIMEOUT = "2m"; # Reduce load timeout (from 5 min)...if it's that big, forget it.
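The commit message mentions ollama-rocm as a fallback should the Vulkan package fail. A minimal sketch of that alternative as a standalone NixOS module, not part of this commit and assuming pkgs.ollama-rocm builds on this system:

{ pkgs, ... }:
{
  services.ollama = {
    enable = true;
    # Fallback sketch: ROCm build instead of pkgs.ollama-vulkan
    package = pkgs.ollama-rocm;
    environmentVariables = {
      HCC_AMDGPU_TARGET = "gfx1031"; # GPU target carried over from the existing config
      OLLAMA_LOAD_TIMEOUT = "2m";
    };
  };
}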