Add some comments re ollama

Need to clean this up, but it would be nice to have an API key or more security on it first
Chris (wolcen) Thompson 2025-09-24 10:30:40 -04:00
parent 9f901f7a30
commit 931780c38f


@@ -405,7 +405,39 @@ in
environmentVariables = {
HCC_AMDGPU_TARGET = "gfx1031"; # used to be necessary, but doesn't seem to anymore
OLLAMA_LOAD_TIMEOUT = "2"; # Reduce load timeout (from 5 min)...if it's that big, forget it.
OLLAMA_ORIGINS = "10.40.4.2"; # Only accept local connections.
# OK, so OLLAMA_ORIGINS is about configuring CORS, not firewalling (and it expects entries like http[s]://IP|Host/*, etc.)
#OLLAMA_ORIGINS = "10.40.4.2"; # Only accept local connections.
# Default/mapped from config ENV vars:
#CUDA_VISIBLE_DEVICES:
#GPU_DEVICE_ORDINAL:
#HIP_VISIBLE_DEVICES:
#HSA_OVERRIDE_GFX_VERSION:10.3.0
#HTTPS_PROXY:
#HTTP_PROXY:
#NO_PROXY:
#OLLAMA_CONTEXT_LENGTH:4096
#OLLAMA_DEBUG:INFO
#OLLAMA_FLASH_ATTENTION:false
#OLLAMA_GPU_OVERHEAD:0
#OLLAMA_HOST:http://10.40.4.2:11434
#OLLAMA_INTEL_GPU:false
#OLLAMA_KEEP_ALIVE:5m0s
#OLLAMA_KV_CACHE_TYPE:
#OLLAMA_LLM_LIBRARY:
#OLLAMA_LOAD_TIMEOUT:2s
#OLLAMA_MAX_LOADED_MODELS:0
#OLLAMA_MAX_QUEUE:512
#OLLAMA_MODELS:/var/lib/ollama/models
#OLLAMA_MULTIUSER_CACHE:false
#OLLAMA_NEW_ENGINE:false
#OLLAMA_NEW_ESTIMATES:false
#OLLAMA_NOHISTORY:false
#OLLAMA_NOPRUNE:false
#OLLAMA_NUM_PARALLEL:1
#OLLAMA_ORIGINS:[http://localhost https://localhost http://localhost:* https://localhost:* http://127.0.0.1 https://127.0.0.1 http://127.0.0.1:* https://127.0.0.1:* http://0.0.0.0 https://0.0.0.0 http://0.0.0.0:* https://0.0.0.0:* app://* file://* tauri://* vscode-webview://* vscode-file://*]
#OLLAMA_SCHED_SPREAD:false
#ROCR_VISIBLE_DEVICES:
#http_proxy:
#https_proxy:
#no_proxy:
};
host = "10.40.4.2"; # See also ip46tables update in firewall extracommands
rocmOverrideGfx = "10.3.0";
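
Since OLLAMA_ORIGINS turned out to be CORS-only, the actual network restriction has to happen at the firewall. A minimal sketch of the kind of ip46tables rule the comment above points at, assuming a 10.40.4.0/24 LAN and the default Ollama port 11434 (neither value is confirmed by this diff):

networking.firewall.extraCommands = ''
  # Allow only the local subnet to reach Ollama over IPv4.
  iptables -A nixos-fw -p tcp -s 10.40.4.0/24 --dport 11434 -j nixos-fw-accept
  # Refuse the port from anywhere else (IPv4 and IPv6).
  ip46tables -A nixos-fw -p tcp --dport 11434 -j nixos-fw-refuse
'';

Opening 11434 via allowedTCPPorts would expose it to everything the interface can see, so a source-scoped rule like this keeps it LAN-only until proper authentication (the API key mentioned in the commit message) is in place.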
@@ -420,6 +452,7 @@ in
docker.enable = true;
podman.enable = false;
docker.storageDriver = "btrfs"; # Only when using BTRFS! (wolcen approved!)
#docker.logDriver = "local"; # Default is journald
oci-containers = {
backend = "docker";
containers = {
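
The individual container definitions continue past this hunk. For context, an entry under containers takes roughly the shape below; the name, image, port, and path are hypothetical, not taken from this config:

containers.example-app = {
  image = "nginx:1.27";
  autoStart = true;
  ports = [ "127.0.0.1:8080:80" ]; # hostIP:hostPort:containerPort
  volumes = [ "/var/lib/example-app:/usr/share/nginx/html:ro" ];
};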