Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
|
@@ -7,24 +7,26 @@ from gradio.themes.utils import colors, fonts, sizes
|
|
| 7 |
from llama_cpp import Llama
|
| 8 |
from huggingface_hub import hf_hub_download
|
| 9 |
|
| 10 |
-
hf_hub_download(repo_id="
|
| 11 |
-
llm = Llama(model_path="./dolphin-2.
|
| 12 |
|
| 13 |
-
ins = '''<|im_start|>
|
|
|
|
|
|
|
| 14 |
{question}<|im_end|>
|
| 15 |
<|im_start|>assistant
|
| 16 |
'''
|
| 17 |
|
| 18 |
theme = gr.themes.Monochrome(
|
| 19 |
-
primary_hue="
|
| 20 |
-
secondary_hue="
|
| 21 |
neutral_hue="neutral",
|
| 22 |
radius_size=gr.themes.sizes.radius_sm,
|
| 23 |
font=[gr.themes.GoogleFont("Space Grotesk"), "ui-sans-serif", "system-ui", "sans-serif"],
|
| 24 |
)
|
| 25 |
|
| 26 |
-
def generate(instruction):
|
| 27 |
-
prompt = ins.format(question=instruction)
|
| 28 |
response = llm(prompt, stop=['<|im_start|>user', '<|im_end|>'])
|
| 29 |
result = response['choices'][0]['text']
|
| 30 |
return result
|
|
@@ -95,16 +97,17 @@ custom_theme = BlueTheme()
|
|
| 95 |
with gr.Blocks(theme=custom_theme, analytics_enabled=False, css=css) as demo:
|
| 96 |
with gr.Column():
|
| 97 |
gr.Markdown(
|
| 98 |
-
""" #
|
| 99 |
|
| 100 |
-
|
| 101 |
|
| 102 |
Type in the box below and click the button to generate answers to your most pressing questions!
|
| 103 |
""")
|
| 104 |
|
| 105 |
with gr.Row():
|
| 106 |
with gr.Column(scale=3):
|
| 107 |
-
instruction = gr.Textbox(placeholder="Enter your question here", label="Question
|
|
|
|
| 108 |
|
| 109 |
with gr.Box():
|
| 110 |
gr.Markdown("**Answer**")
|
|
@@ -118,7 +121,7 @@ with gr.Blocks(theme=custom_theme, analytics_enabled=False, css=css) as demo:
|
|
| 118 |
outputs=[output],
|
| 119 |
)
|
| 120 |
|
| 121 |
-
submit.click(generate, inputs=[instruction], outputs=[output])
|
| 122 |
-
instruction.submit(generate, inputs=[instruction], outputs=[output])
|
| 123 |
|
| 124 |
demo.queue(concurrency_count=1).launch(debug=True)
|
|
|
|
| 7 |
from llama_cpp import Llama
|
| 8 |
from huggingface_hub import hf_hub_download
|
| 9 |
|
| 10 |
# Fetch the quantized GGUF weights from the Hub (cached into the working dir),
# then load them with llama.cpp. Naming the repo/file once keeps the two calls
# in sync: the download target and the load path are guaranteed to match.
MODEL_REPO = "cognitivecomputations/dolphin-2.9-llama3-8b-gguf"
MODEL_FILE = "dolphin-2.9-llama3-8b-q3_K_M.gguf"
hf_hub_download(repo_id=MODEL_REPO, filename=MODEL_FILE, local_dir=".")
llm = Llama(model_path=f"./{MODEL_FILE}")
|
| 12 |
|
| 13 |
# ChatML prompt template: a system turn, a user turn, then the opening of the
# assistant turn so the model continues from there. Written as adjacent string
# literals so each protocol line is visible on its own row; the resulting
# value is identical to the original triple-quoted form (including the
# trailing newline after the assistant header).
ins = (
    "<|im_start|>system\n"
    "{system}<|im_end|>\n"
    "<|im_start|>user\n"
    "{question}<|im_end|>\n"
    "<|im_start|>assistant\n"
)
|
| 19 |
|
| 20 |
# UI theme: Monochrome base re-tinted red/orange, small corner radius, and
# Space Grotesk with standard sans-serif fallbacks. The font stack is named
# separately so the theme call reads as pure configuration.
_font_stack = [
    gr.themes.GoogleFont("Space Grotesk"),
    "ui-sans-serif",
    "system-ui",
    "sans-serif",
]
theme = gr.themes.Monochrome(
    primary_hue="red",
    secondary_hue="orange",
    neutral_hue="neutral",
    radius_size=gr.themes.sizes.radius_sm,
    font=_font_stack,
)
|
| 27 |
|
| 28 |
def generate(instruction, system_prompt):
    """Fill the ChatML template with the user/system prompts and run inference.

    Generation stops at the next user turn or an end-of-turn marker; only the
    completion text of the first choice is returned.
    """
    filled_prompt = ins.format(question=instruction, system=system_prompt)
    completion = llm(filled_prompt, stop=['<|im_start|>user', '<|im_end|>'])
    return completion['choices'][0]['text']
|
|
|
|
| 97 |
with gr.Blocks(theme=custom_theme, analytics_enabled=False, css=css) as demo:
|
| 98 |
with gr.Column():
|
| 99 |
gr.Markdown(
|
| 100 |
+
""" # 🦙 Dolphin4ALL
|
| 101 |
|
| 102 |
+
llama3 8b (q3_k_m)
|
| 103 |
|
| 104 |
Type in the box below and click the button to generate answers to your most pressing questions!
|
| 105 |
""")
|
| 106 |
|
| 107 |
with gr.Row():
|
| 108 |
with gr.Column(scale=3):
|
| 109 |
+
instruction = gr.Textbox(placeholder="Enter your question here", label="Question Prompts")
|
| 110 |
+
sys_prompt = gr.Textbox(placeholder="Enter your system instructions here", label="System Prompts")
|
| 111 |
|
| 112 |
with gr.Box():
|
| 113 |
gr.Markdown("**Answer**")
|
|
|
|
| 121 |
outputs=[output],
|
| 122 |
)
|
| 123 |
|
| 124 |
+
submit.click(generate, inputs=[instruction, sys_prompt], outputs=[output])
|
| 125 |
+
instruction.submit(generate, inputs=[instruction, sys_prompt], outputs=[output])
|
| 126 |
|
| 127 |
demo.queue(concurrency_count=1).launch(debug=True)
|