feat: Add DeepSeek API support with provider selection
app.py CHANGED
@@ -86,7 +86,7 @@ def get_files_infos(files):
     return results
 
 
-def get_completion(prompt, files_info, top_p, temperature):
+def get_completion(prompt, files_info, top_p, temperature, api_choice):
     # Create table header
     files_info_string = "| Type | Name | Dimensions | Duration | Audio Channels |\n"
     files_info_string += "|------|------|------------|-----------|--------|\n"
@@ -154,8 +154,17 @@ YOUR FFMPEG COMMAND:
     print(msg["content"])
     print("=====================\n")
 
+    if api_choice == "DeepSeek":
+        client.base_url = "https://api.deepseek.com/v1"
+        client.api_key = DEEPSEEK_API_KEY
+        model = "deepseek-chat"
+    else:
+        client.base_url = "https://api-inference.huggingface.co/v1/"
+        client.api_key = HF_API_KEY
+        model = "Qwen/Qwen2.5-Coder-32B-Instruct"
+
     completion = client.chat.completions.create(
-        model=
+        model=model,
         messages=messages,
         temperature=temperature,
         top_p=top_p,
@@ -183,7 +192,7 @@ YOUR FFMPEG COMMAND:
         raise Exception("API Error")
 
 
-def update(files, prompt, top_p=1, temperature=1):
+def update(files, prompt, top_p=1, temperature=1, api_choice="HuggingFace"):
     if prompt == "":
         raise gr.Error("Please enter a prompt.")
 
@@ -202,7 +211,7 @@ def update(files, prompt, top_p=1, temperature=1):
     while attempts < 2:
         print("ATTEMPT", attempts)
         try:
-            command_string = get_completion(prompt, files_info, top_p, temperature)
+            command_string = get_completion(prompt, files_info, top_p, temperature, api_choice)
             print(
                 f"""///PROMTP {prompt} \n\n/// START OF COMMAND ///:\n\n{command_string}\n\n/// END OF COMMAND ///\n\n"""
             )
@@ -260,6 +269,11 @@ with gr.Blocks() as demo:
     )
     with gr.Row():
         with gr.Column():
+            api_choice = gr.Radio(
+                choices=["HuggingFace", "DeepSeek"],
+                value="HuggingFace",
+                label="API Provider"
+            )
             user_files = gr.File(
                 file_count="multiple",
                 label="Media files",
@@ -295,7 +309,7 @@ with gr.Blocks() as demo:
 
     btn.click(
         fn=update,
-        inputs=[user_files, user_prompt, top_p, temperature],
+        inputs=[user_files, user_prompt, top_p, temperature, api_choice],
         outputs=[generated_video, generated_command],
     )
     with gr.Row():