annna - Annna the nice friendly bot.
git clone git://bitreich.org/annna/ git://enlrupgkhuxnvlhsf6lc3fziv5h2hhfrinws6…
---
commit bcfb86d18d24d78d5f7a2b6e488c231846271a17
parent 2d5d53bad25e3ca4b28eed6bbaf6b101c64d4859
Author: Annna Robert-Houdin <[email protected]>
Date:   Sun, 19 Oct 2025 11:57:54 +0200

Make our gpu model uncensored llama:8b.

Diffstat:
 M gpt | 5 +++--

1 file changed, 3 insertions(+), 2 deletions(-)
---
diff --git a/gpt b/gpt
@@ -47,6 +47,7 @@ function local_llama() {
 function remote_llama() {
 	prompt="$1"
+	ggmlmodel="mannix/llama3.1-8b-abliterated"
 	if [ -z "$prompt" ];
 	then
 		cat \
@@ -54,7 +55,7 @@ function remote_llama() {
 	ollama run \
 		--hidethinking \
 		--nowordwrap \
-		llama3:8b \
+		"${ggmlmodel}" \
 		"${prompt}" \
 	| head -n 1 \
 	| sed -e 's/^"//;s/"$//;'
@@ -64,7 +65,7 @@ function remote_llama() {
 	ollama run \
 		--hidethinking \
 		--nowordwrap \
-		llama3:8b \
+		"${ggmlmodel}" \
 		"${prompt}" \
 	| head -n 1 \
 	| sed -e 's/^"//;s/"$//;'
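
After this change the GPU host has to have the abliterated model available locally before remote_llama() can call it. Below is a minimal sketch, assuming a stock ollama install and reusing the invocation shown in the diff above; the model name and flags come from the patch, while the example prompt is purely hypothetical.

	# Sketch: fetch the new model once, then invoke it the way the
	# patched remote_llama() does.  The prompt is only an illustration.
	ggmlmodel="mannix/llama3.1-8b-abliterated"
	ollama pull "${ggmlmodel}"

	prompt="Say hello in one short sentence."
	ollama run \
		--hidethinking \
		--nowordwrap \
		"${ggmlmodel}" \
		"${prompt}" \
	| head -n 1 \
	| sed -e 's/^"//;s/"$//;'

The head/sed pipeline keeps only the first line of the model's output and strips surrounding double quotes, so the bot answers with a single clean sentence.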