Commit df1aef12 authored by Martin Forell

new q for code gemma

parent 884e1954
@@ -10,7 +10,7 @@ config_file: |
gpu_layers: 35
threads: 16
parameters:
- model: downloads/llama_backend/codegemma-7b-it-f16.gguf
+ model: downloads/llama_backend/codegemma-1.1-2b_Q8_0.gguf
roles:
assistant: 'Assistant:'
system: 'System:'
@@ -34,7 +34,7 @@ config_file: |
"model": "gpt-4",
"messages": [{"role": "user", "content": "How are you doing?", "temperature": 0.1}]
}'
-# files:
-# - filename: "downloads/llama_backend/codegemma-1.1-2b-Q5_K_M.gguf"
-#   sha256: "c96c1a87338404974159cede769861df4910d15db569c8f41054dde5ca0c82aa"
-#   uri: "https://huggingface.co/jacobhoffmann/codegemma-1.1-2b-GGUF/resolve/main/codegemma-1.1-2b_Q5_K_M.gguf"
\ No newline at end of file
+files:
+- filename: "downloads/llama_backend/codegemma-1.1-2b_Q8_0.gguf"
+  sha256: ""
+  uri: "https://huggingface.co/jacobhoffmann/codegemma-1.1-2b-GGUF/resolve/main/codegemma-1.1-2b_Q8_0.gguf"
\ No newline at end of file
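
Note that the new files entry leaves sha256 empty. A minimal shell sketch for filling it in after downloading the file from the uri above, assuming the download directory from the config and that wget and sha256sum are available on the host:

    # Fetch the Q8_0 quantization referenced by the new uri
    mkdir -p downloads/llama_backend
    wget -O downloads/llama_backend/codegemma-1.1-2b_Q8_0.gguf \
      "https://huggingface.co/jacobhoffmann/codegemma-1.1-2b-GGUF/resolve/main/codegemma-1.1-2b_Q8_0.gguf"

    # Print the SHA-256 digest to paste into the empty sha256 field
    sha256sum downloads/llama_backend/codegemma-1.1-2b_Q8_0.gguf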
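
For a quick smoke test of the reconfigured backend, the curl example already embedded in the config can be adapted. The endpoint URL and model name below are assumptions, not part of this commit; substitute whatever your deployment actually exposes:

    curl http://localhost:8080/v1/chat/completions \
      -H "Content-Type: application/json" \
      -d '{
        "model": "codegemma-1.1-2b",
        "messages": [{"role": "user", "content": "Write a Python function that reverses a string."}],
        "temperature": 0.1
      }'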