Compare commits
2 commits
bcb892e5a6...51e416b7e0
| Author | SHA1 | Date |
|---|---|---|
| | 51e416b7e0 | |
| | c1dfb6b589 | |
1 changed file with 142 additions and 54 deletions
@@ -3,7 +3,6 @@
# SPDX-License-Identifier: MIT

# TODO: Add `eza`, `gitui`, `opencode`
# TODO: Increase context window for `aichat_reasoning_remote`

{
  config,
@@ -15,12 +14,30 @@
}:

let
  # localFastModel = "qwen2.5-coder-num_ctx";
  # localReasoningModel = "deepseek-r1-num_ctx";
  llm = {
    remote = {
      coding = "moonshotai/kimi-k2:free";
      reasoning = "deepseek/deepseek-r1-0528:free";
    };
    free = {
      key = "openrouter";
      name = "OpenRouter";
      url = "https://openrouter.ai/api/v1";
      env = "OPENROUTER_API_KEY";
      models = {
        coding = {
          key = "moonshotai/kimi-k2:free";
          name = "Kimi K2 (free)";
          provider = [ "chutes/fp8" ];
          tools = false;
          reasoning = false;
          vision = true;
        };
        reasoning = {
          key = "deepseek/deepseek-r1-0528:free";
          name = "Deepseek R1 (free)";
          provider = [ "chutes" ];
          tools = false;
          reasoning = true;
          vision = true;
        };
      };
    };
  };
in
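Note: the `llm` catalog above is only consumed through string interpolation further down (shell aliases, codex, opencode, aichat). A minimal standalone sketch, assuming a pared-down copy of the catalog rather than the real module arguments, of how the `provider:model` strings are assembled:

```nix
# sketch.nix, evaluate with: nix-instantiate --eval --strict sketch.nix
let
  # pared-down copy of the catalog defined in the hunk above
  llm = {
    free = {
      key = "openrouter";
      models.coding.key = "moonshotai/kimi-k2:free";
    };
  };
in
# the same interpolation used for the aichat/codex/opencode model settings below
"${llm.free.key}:${llm.free.models.coding.key}"
# => "openrouter:moonshotai/kimi-k2:free"
```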
@@ -151,7 +168,9 @@ in
  nushell = {
    enable = true;
    environmentVariables = {
      OPENROUTER_API_KEY = lib.hm.nushell.mkNushellInline "cat ${config.sops.secrets.openrouter_api_key.path}";
      ${llm.free.env} = lib.hm.nushell.mkNushellInline "cat ${
        config.sops.secrets."${llm.free.key}_api_key".path
      }";
    };
    settings = {
      completions = {
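Note: `${llm.free.env}` is a dynamic attribute name, so the sops-backed value lands under whatever key the catalog names (here `OPENROUTER_API_KEY`, matching the literal line next to it). A minimal sketch of that evaluation, assuming a hypothetical secret path in place of `config.sops.secrets."openrouter_api_key".path` and leaving sops-nix and home-manager out entirely:

```nix
# nix-instantiate --eval --strict sketch.nix
let
  llm.free.env = "OPENROUTER_API_KEY";
  # hypothetical path standing in for config.sops.secrets."openrouter_api_key".path
  secretPath = "/run/secrets/openrouter_api_key";
in
{
  ${llm.free.env} = "cat ${secretPath}";
}
# => { OPENROUTER_API_KEY = "cat /run/secrets/openrouter_api_key"; }
```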
@@ -164,9 +183,9 @@ in
    };
    # set -x ZATHURA_PLUGINS_PATH $GUIX_HOME/lib/zathura
    shellAliases = {
      aichat_reasoning_remote = "${pkgs.aichat}/bin/aichat --model openrouter:${llm.remote.reasoning}";
      aichat_coding_remote = "${pkgs.aichat}/bin/aichat --model openrouter:${llm.remote.coding}";
      codex_remote = "${pkgs.codex}/bin/codex --provider openrouter --model ${llm.remote.coding}";
      aichat_reasoning_free = "${pkgs.aichat}/bin/aichat --model openrouter:${llm.free.models.reasoning.key}";
      aichat_coding_free = "${pkgs.aichat}/bin/aichat --model ${llm.free.key}:${llm.free.models.coding.key}";
      codex_free = "${pkgs.codex}/bin/codex --provider ${llm.free.key} --model ${llm.free.models.coding.key}";
      hotspot = "sudo sysctl net.ipv4.ip_default_ttl=65";
    };
  };
@@ -384,44 +403,44 @@ in
  };
  codex = {
    enable = true;
    custom-instructions = ''
      ## 10. Applying Patch Files with patch
    # custom-instructions = ''
    # ## 10. Applying Patch Files with patch

      When the built-in `apply_patch` tool or `git apply` fails to apply a diff/patch file (especially if the file being patched contains special characters that might confuse simpler patch tools), the standard `patch` utility can be a more robust alternative.
    # When the built-in `apply_patch` tool or `git apply` fails to apply a diff/patch file (especially if the file being patched contains special characters that might confuse simpler patch tools), the standard `patch` utility can be a more robust alternative.

      - **Patch File Format**: Ensure your patch file is in a standard unified diff format. Typically, these patches are generated with `git diff > my_feature.patch` or manually crafted. If the patch refers to files with `a/` and `b/` prefixes (e.g., `--- a/file.txt`, `+++ b/file.txt`), you'll use the `-p1` option.
    # - **Patch File Format**: Ensure your patch file is in a standard unified diff format. Typically, these patches are generated with `git diff > my_feature.patch` or manually crafted. If the patch refers to files with `a/` and `b/` prefixes (e.g., `--- a/file.txt`, `+++ b/file.txt`), you'll use the `-p1` option.

      - **Creating the Patch File**: You can create a patch file using shell redirection, for example:
    # - **Creating the Patch File**: You can create a patch file using shell redirection, for example:

      ```bash
      cat <<'EOF' > fix_descriptive_name.patch
      --- a/path/to/your/file.ext
      +++ b/path/to/your/file.ext
      @@ -line_num,num_lines +line_num,num_lines @@ context_or_change
      -old_line_content
      +new_line_content
      EOF
      ```
    # ```bash
    # cat <<'EOF' > fix_descriptive_name.patch
    # --- a/path/to/your/file.ext
    # +++ b/path/to/your/file.ext
    # @@ -line_num,num_lines +line_num,num_lines @@ context_or_change
    # -old_line_content
    # +new_line_content
    # EOF
    # ```

      *Important*: Ensure the `EOF` marker is on its own line with no trailing spaces.
    # *Important*: Ensure the `EOF` marker is on its own line with no trailing spaces.

      - **Applying the Patch**: Use the `patch` command via the `shell` tool. The `-p1` option strips the leading component from file paths in the patch file (`a/`, `b/`).
    # - **Applying the Patch**: Use the `patch` command via the `shell` tool. The `-p1` option strips the leading component from file paths in the patch file (`a/`, `b/`).

      ```
      # Example: Apply a patch file
      default_api.shell(command=["sh", "-c", "patch -p1 < fix_descriptive_name.patch"])
      ```
    # ```
    # # Example: Apply a patch file
    # default_api.shell(command=["sh", "-c", "patch -p1 < fix_descriptive_name.patch"])
    # ```

      - **Verification**: After applying, always verify that the target file has been changed as expected (e.g., using `cat` or `git diff`).
    # - **Verification**: After applying, always verify that the target file has been changed as expected (e.g., using `cat` or `git diff`).

      - **Cleanup**: Remove the patch file if it's no longer needed:
    # - **Cleanup**: Remove the patch file if it's no longer needed:

      ```
      default_api.shell(command=["rm", "fix_descriptive_name.patch"])
      ```
    '';
    # ```
    # default_api.shell(command=["rm", "fix_descriptive_name.patch"])
    # ```
    # '';
    settings = {
      model = "${llm.remote.coding}";
      model = llm.free.models.coding.key;
      provider = "ollama";
      providers = {
        ollama = {
@@ -429,18 +448,77 @@ in
          baseURL = "http://localhost:11434/v1";
          envKey = "OLLAMA_API_KEY";
        };
        openrouter = {
          name = "OpenRouter";
          baseURL = "https://openrouter.ai/api/v1";
          envKey = "OPENROUTER_API_KEY";
        ${llm.free.key} = {
          name = llm.free.name;
          baseURL = llm.free.url;
          envKey = llm.free.env;
        };
      };
    };
  };
  opencode = {
    enable = true;
    settings = {
      "$schema" = "https://opencode.ai/config.json";
      provider = {
        ${llm.free.key} = {
          npm = "@ai-sdk/openai-compatible";
          name = llm.free.name;
          options = {
            baseURL = llm.free.url;
            apiKey = "{env:${llm.free.env}}";
          };
          models = {
            ${llm.free.models.coding.key} = (
              let
                model = llm.free.models.coding;
              in
              {
                id = model.key;
                name = model.name;
                options = {
                  tools = model.tools;
                  ${llm.free.name} = {
                    provider = {
                      order = model.provider;
                      allow_fallbacks = false;
                    };
                  };
                };
                tool_call = model.tools;
                reasoning = model.reasoning;
              }
            );
            ${llm.free.models.reasoning.key} = (
              let
                model = llm.free.models.reasoning;
              in
              {
                id = model.key;
                name = model.name;
                options = {
                  tools = model.tools;
                  ${llm.free.name} = {
                    provider = {
                      order = model.provider;
                      allow_fallbacks = false;
                    };
                  };
                };
                tool_call = model.tools;
                reasoning = model.reasoning;
              }
            );
          };
        };
      };
      model = "${llm.free.key}:${llm.free.models.coding.key}";
    };
  };
  aichat = {
    enable = true;
    settings = {
      model = "openrouter:${llm.remote.coding}";
      model = "${llm.free.key}:${llm.free.models.coding.key}";
      clients = [
        # {
        # type = "openai-compatible";
@@ -461,19 +539,29 @@ in
        # }
        {
          type = "openai-compatible";
          name = "openrouter";
          api_base = "https://openrouter.ai/api/v1";
          name = llm.free.key;
          api_base = llm.free.url;
          models = [
            {
              name = "${llm.remote.coding}";
              supports_function_calling = true;
              supports_vision = true;
            }
            {
              name = "${llm.remote.reasoning}";
              supports_function_calling = true;
              supports_vision = true;
            }
            (
              let
                model = llm.free.models.coding;
              in
              {
                name = model.key;
                supports_function_calling = model.tools;
                supports_vision = model.vision;
              }
            )
            (
              let
                model = llm.free.models.reasoning;
              in
              {
                name = model.key;
                supports_function_calling = model.tools;
                supports_vision = model.vision;
              }
            )
          ];
        }
      ];
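Note: each parenthesised `let ... in { ... }` entry in the `models` list is just an attribute set built from the catalog. Using the `llm.free.models.coding` values from the top of the diff (a reading of the expression, not output from an actual build), the first generated client entry reduces to:

```nix
{
  name = "moonshotai/kimi-k2:free";   # model.key
  supports_function_calling = false;  # model.tools
  supports_vision = true;             # model.vision
}
```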