Compare commits
2 commits: 8036d54fcd ... 72204b9817

Author | SHA1 | Date
---|---|---
| 72204b9817 |
| 303cec1b3c |
@@ -44,6 +44,10 @@ in
  # };
  #};

  services.ollama = {
    enable = true;
    user = "zaphar";
  };
  services.my-lorri.enable = true;
  services.durnitisp.enable = true;
  services.node-exporter.enable = true;
@ -369,6 +369,8 @@ EOF";
|
||||
victoriametrics
|
||||
# TODO add sonic-pi here if it supports the arch
|
||||
unstablePkgs.dbeaver-bin
|
||||
postgresql
|
||||
unstablePkgs.ollama
|
||||
])
|
||||
#++ (with pkgs.ocamlPackages; [
|
||||
# dune_3
|
||||
@ -397,6 +399,7 @@ EOF";
|
||||
./modules/victoria-logs.nix
|
||||
./modules/vector.nix
|
||||
./modules/lorri.nix
|
||||
./modules/ollama.nix
|
||||
./darwin-configuration.nix
|
||||
];
|
||||
};
|
||||
|
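Taken together, the hunks above wire the new module into this machine's configuration: services.ollama is enabled for user zaphar, unstablePkgs.ollama joins the package list, and ./modules/ollama.nix is added to the imports. A minimal rollout sketch, assuming the usual nix-darwin workflow; the exact rebuild invocation (and any flake arguments) is not shown in this diff:

  # Hypothetical rollout; adjust paths/flags to however this repository drives nix-darwin.
  cd ~/src/nix-configs            # placeholder checkout path, not taken from the diff
  git pull                        # fetch the two commits being compared here
  darwin-rebuild switch           # build and activate the updated base system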
@@ -672,11 +672,11 @@ require('copilot').setup();

require('avante').setup ({
  provider = "copilot",
  openai = {
    model = "Claude Sonnet 3.7", -- your desired model (or use gpt-4o, etc.)
    timeout = 30000, -- Timeout in milliseconds, increase this for reasoning models
    claude = {
      endpoint = "https://api.anthropic.com",
      model = "claude-3-7-sonnet-20250219",
      timeout = 30000, -- Timeout in milliseconds
      temperature = 0,
      max_tokens = 8192, -- Increase this to include reasoning tokens (for reasoning models)
      --reasoning_effort = "medium", -- low|medium|high, only used for reasoning models
    },
    max_tokens = 20480,
  };
})
nix/base-system/modules/ollama.nix (new file, 56 lines)
@@ -0,0 +1,56 @@
{pkgs, lib, config, ...}:
with lib;
let
  mkLauncher = import ../../packages/darwin-launcher.nix { inherit pkgs; };
  ollamaLauncher = mkLauncher ''
    exec ${pkgs.clio}/bin/clio \
      --out-path=${config.services.ollama.stdoutPath} \
      --err-path=${config.services.ollama.stderrPath} \
      --pid-file=${config.services.ollama.pidPath} \
      --paranoid \
      -- \
      ${pkgs.ollama}/bin/ollama \
      serve
  '';
in
{
  options.services.ollama = {
    enable = mkEnableOption "Enable the ollama agent";
    stdoutPath = mkOption {
      default = "/Users/${config.services.ollama.user}/config/ollama/out.log";
    };
    stderrPath = mkOption {
      default = "/Users/${config.services.ollama.user}/config/ollama/err.log";
    };
    pidPath = mkOption {
      default = "/Users/${config.services.ollama.user}/config/ollama/ollama.pid";
    };
    user = mkOption {
      default = "zaphar";
    };
  };

  config = {
    launchd.user.agents.ollama = mkIf config.services.ollama.enable {
      serviceConfig = {
        ProgramArguments = [
          "${ollamaLauncher}"
        ];
        EnvironmentVariables = {
          "OLLAMA_HOST" = "127.0.0.1:11434";
          "OLLAMA_MODELS" = "/Users/${config.services.ollama.user}/config/ollama/";
        };
        RunAtLoad = true;
      };
    };

    environment.etc."newsyslog.d/org.nixos.ollama.conf" = mkIf config.services.ollama.enable {
      text = ''
        # logfilename [owner:group] mode count size when flags [/pid_file] [sig_num]
        ${config.services.ollama.stdoutPath} zaphar:staff 644 10 1000 * BJ ${config.services.ollama.pidPath} 1
        ${config.services.ollama.stderrPath} zaphar:staff 644 10 1000 * BJ ${config.services.ollama.pidPath} 1
      '';
    };
  };
}
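The new module wraps ollama serve with the repository's clio launcher so launchd gets a pid file and redirected logs, and newsyslog rotates those logs. A rough post-activation check; a sketch that assumes nix-darwin's default org.nixos. prefix for launchd.user.agents labels, consistent with the org.nixos.ollama.conf file name above:

  # Is the user agent loaded?
  launchctl list | grep org.nixos.ollama

  # The agent exports OLLAMA_HOST=127.0.0.1:11434, so the API should answer locally.
  curl -s http://127.0.0.1:11434/api/tags

  # Logs and pid file land at the module's default paths.
  tail /Users/zaphar/config/ollama/out.log
  cat /Users/zaphar/config/ollama/ollama.pid

  # Dry-run the rotation rules installed under /etc/newsyslog.d.
  sudo newsyslog -nvf /etc/newsyslog.d/org.nixos.ollama.conf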
@@ -44,4 +44,5 @@ function disk_usage() {

eval "$(direnv hook zsh)"
export ANTHROPIC_API_KEY="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"
export OPENAI_API_KEY="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"
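These exports are what the avante.nvim/copilot setup earlier in the diff depends on for Anthropic and OpenAI access. A quick sanity check that the Anthropic key and the claude-3-7-sonnet-20250219 model named in that config are usable, via Anthropic's Messages API; this check is illustrative and not part of the commits:

  # Run from a fresh shell so the exports above are in effect.
  curl -s https://api.anthropic.com/v1/messages \
    -H "x-api-key: $ANTHROPIC_API_KEY" \
    -H "anthropic-version: 2023-06-01" \
    -H "content-type: application/json" \
    -d '{"model":"claude-3-7-sonnet-20250219","max_tokens":16,"messages":[{"role":"user","content":"ping"}]}'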