Update app.py
app.py
CHANGED
@@ -8,9 +8,9 @@ from threading import Thread
 
 MODEL_LIST = ["meta-llama/Meta-Llama-3.1-8B-Instruct"]
 HF_TOKEN = os.environ.get("HF_TOKEN", None)
-MODEL =
+MODEL = "Skywork/Skywork-o1-Open-Llama-3.1-8B"
 
-TITLE = "<h1><center>
+TITLE = "<h1><center>Skywork-o1-Open-Llama-3.1-8B</center></h1>"
 
 PLACEHOLDER = """
 <center>

@@ -83,7 +83,7 @@ def stream_chat(
     top_k = top_k,
     temperature = temperature,
     repetition_penalty=penalty,
-    eos_token_id=
+    eos_token_id=128009,
     streamer=streamer,
 )
 

@@ -109,7 +109,11 @@ with gr.Blocks(css=CSS, theme="soft") as demo:
 additional_inputs_accordion=gr.Accordion(label="⚙️ Parameters", open=False, render=False),
 additional_inputs=[
     gr.Textbox(
-        value="
+        value="""
+You are Skywork-o1, a thinking model developed by Skywork AI, specializing in solving complex problems involving mathematics, coding, and logical reasoning through deep thought.
+When faced with a user's request, you first engage in a lengthy and in-depth thinking process to explore possible solutions to the problem.
+After completing your thoughts, you then provide a detailed explanation of the solution process in your response.
+""",
         label="System Prompt",
         render=False,
     ),
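Context for the eos_token_id=128009 change: on Llama 3.1 tokenizers, from which the Skywork-o1 checkpoint is derived, 128009 is the end-of-turn token <|eot_id|>, so generation stops at the end of the assistant turn. Below is a minimal sketch, not part of the Space's app.py, to verify that assumption against the model's own tokenizer:

# Sketch only: confirm that token id 128009 is the <|eot_id|> end-of-turn token
# the new eos_token_id setting relies on.
from transformers import AutoTokenizer

MODEL = "Skywork/Skywork-o1-Open-Llama-3.1-8B"
tokenizer = AutoTokenizer.from_pretrained(MODEL)

# convert_tokens_to_ids maps a special-token string to its integer id.
eot_id = tokenizer.convert_tokens_to_ids("<|eot_id|>")
print(eot_id)  # expected: 128009 on Llama-3.1-derived tokenizers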
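For reference, a hedged sketch of how the new default System Prompt Textbox reaches the chat function: gr.ChatInterface passes each additional_inputs value as an extra argument after (message, history). The simplified stream_chat signature and stub reply below are assumptions for illustration, not the Space's actual implementation, which also takes sampling parameters and streams model output:

import gradio as gr

DEFAULT_SYSTEM_PROMPT = """
You are Skywork-o1, a thinking model developed by Skywork AI, specializing in solving complex problems involving mathematics, coding, and logical reasoning through deep thought.
When faced with a user's request, you first engage in a lengthy and in-depth thinking process to explore possible solutions to the problem.
After completing your thoughts, you then provide a detailed explanation of the solution process in your response.
"""

def stream_chat(message, history, system_prompt):
    # gr.ChatInterface appends each additional_inputs component's value after
    # (message, history), so the Textbox value arrives here as system_prompt.
    # The real app builds a chat template from it and streams tokens; this
    # stub just echoes what it received.
    return f"(system prompt has {len(system_prompt.split())} words) You said: {message}"

demo = gr.ChatInterface(
    fn=stream_chat,
    additional_inputs=[
        gr.Textbox(value=DEFAULT_SYSTEM_PROMPT, label="System Prompt", render=False),
    ],
)

if __name__ == "__main__":
    demo.launch()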