Commit 2ca2e1d (parent: d59455e) by ffreemt
Update propmpt_tempalte ### HUMAN:\n{}\n### RESPONSE:
README.md CHANGED

@@ -1,5 +1,5 @@
 ---
-title:
+title: llama2-7b-chat-uncensored-ggml
 emoji: 🚀
 colorFrom: green
 colorTo: green
app.py CHANGED

@@ -55,6 +55,7 @@ if _:
 # url = "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/blob/main/llama-2-13b-chat.ggmlv3.q2_K.bin"
 url = "https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGML/blob/main/llama-2-7b-chat.ggmlv3.q2_K.bin"  # 2.87G
 url = "https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGML/blob/main/llama-2-7b-chat.ggmlv3.q4_K_M.bin"  # 2.87G
+url = "https://huggingface.co/TheBloke/llama2_7b_chat_uncensored-GGML/blob/main/llama2_7b_chat_uncensored.ggmlv3.q4_K_M.bin"  # 4.08G


 prompt_template = """Below is an instruction that describes a task. Write a response that appropriately completes the request.

@@ -105,10 +106,15 @@ You are a helpful assistant.
 {question} [/INST]
 """

+prompt_template = """### HUMAN:
+{question}
+
+### RESPONSE:"""
+
 _ = [elm for elm in prompt_template.splitlines() if elm.strip()]
 stop_string = [elm.split(":")[0] + ":" for elm in _][-2]

-logger.debug(f"{stop_string=}")
+logger.debug(f"{stop_string=} not used")

 _ = psutil.cpu_count(logical=False) - 1
 cpu_count: int = int(_) if _ else 1
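For reference, the new template changes what the stop-string heuristic just below it computes. Re-running those two lines against the new template (plain Python, nothing beyond what the diff already shows) yields '{question}:' rather than a role marker such as '### HUMAN:', which is presumably why the debug message now notes the value is not used.

# Trace of the stop_string heuristic from app.py against the new template.
prompt_template = """### HUMAN:
{question}

### RESPONSE:"""

lines = [elm for elm in prompt_template.splitlines() if elm.strip()]
# lines == ['### HUMAN:', '{question}', '### RESPONSE:']

stop_string = [elm.split(":")[0] + ":" for elm in lines][-2]
print(stop_string)  # prints: {question}: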
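The other change points url at TheBloke/llama2_7b_chat_uncensored-GGML (q4_K_M, roughly 4.08 GB). The download/load code sits outside these hunks, so the snippet below is only a minimal sketch of how a GGML file like this is commonly loaded, assuming a ctransformers backend; the repo id, file name, and ### HUMAN:/### RESPONSE: markers come from the diff, everything else is illustrative.

# Minimal sketch (assumption: ctransformers backend) for loading the q4_K_M GGML file
# named in the new url line and prompting it with the HUMAN/RESPONSE template.
from ctransformers import AutoModelForCausalLM

llm = AutoModelForCausalLM.from_pretrained(
    "TheBloke/llama2_7b_chat_uncensored-GGML",
    model_file="llama2_7b_chat_uncensored.ggmlv3.q4_K_M.bin",
    model_type="llama",
)

prompt = "### HUMAN:\nWhat is the capital of France?\n\n### RESPONSE:"
# Stop on the next HUMAN turn so generation ends after a single response.
print(llm(prompt, stop=["### HUMAN:"], max_new_tokens=64))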