Update app.py
app.py CHANGED
@@ -24,7 +24,7 @@ if not HF_API_KEY:
     raise ValueError("Please set the HF_API_KEY environment variable.")
 
 # You can use different models for different tasks
-MAIN_LLM_ENDPOINT = "https://router.huggingface.co/hf-inference/models/
+MAIN_LLM_ENDPOINT = "https://router.huggingface.co/hf-inference/models/mistralai/Mistral-Nemo-Instruct-2407/v1/chat/completions" # Replace with your actual endpoint
 REASONING_LLM_ENDPOINT = "https://router.huggingface.co/hf-inference/models/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B/v1/chat/completions" # Can be the same as main if needed
 CRITIC_LLM_ENDPOINT = "https://router.huggingface.co/hf-inference/models/Qwen/QwQ-32B-Preview/v1/chat/completions" # Can be the same as main if needed
 
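For context, below is a minimal sketch of how app.py might call one of these endpoints with the HF_API_KEY bearer token. It assumes the router endpoint accepts the OpenAI-style chat-completions payload its /v1/chat/completions path suggests; the query_llm helper, the max_tokens value, and the example prompt are illustrative and not taken from the actual app.py.

import os
import requests

HF_API_KEY = os.environ["HF_API_KEY"]
MAIN_LLM_ENDPOINT = (
    "https://router.huggingface.co/hf-inference/models/"
    "mistralai/Mistral-Nemo-Instruct-2407/v1/chat/completions"
)

def query_llm(endpoint: str, prompt: str) -> str:
    # Send a single user message to a chat-completions endpoint and return the reply text.
    # Assumes the endpoint is OpenAI-compatible; some deployments may also require a "model" field.
    response = requests.post(
        endpoint,
        headers={"Authorization": f"Bearer {HF_API_KEY}"},
        json={
            "messages": [{"role": "user", "content": prompt}],
            "max_tokens": 512,
        },
        timeout=60,
    )
    response.raise_for_status()
    return response.json()["choices"][0]["message"]["content"]

if __name__ == "__main__":
    print(query_llm(MAIN_LLM_ENDPOINT, "Summarize the benefits of splitting main, reasoning, and critic models."))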