File size: 1,295 Bytes
0fe9663
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
import gradio as gr
from models import generate_response
from config import MODEL_NAME

# Gradio UI: a chat interface for MobileLLM-Pro with user-tunable
# generation parameters (system prompt, temperature, max tokens, top-p).
with gr.Blocks(title="MobileLLM-Pro Chat") as demo:
    gr.Markdown("# MobileLLM-Pro Chat Interface")
    gr.Markdown("Built with [anycoder](https://huggingface.co/spaces/akhaliq/anycoder)")

    # Generation controls. These components are forwarded to the chat
    # handler via ChatInterface's additional_inputs mechanism, in the
    # same order generate_response expects them.
    system_prompt = gr.Textbox(
        label="System Prompt",
        value="You are a helpful AI assistant.",
        lines=2,
    )
    temperature = gr.Slider(
        label="Temperature",
        minimum=0.0,
        maximum=2.0,
        value=0.7,
        step=0.1,
    )
    max_tokens = gr.Slider(
        label="Max New Tokens",
        minimum=10,
        maximum=1000,
        value=256,
        step=10,
    )
    top_p = gr.Slider(
        label="Top P",
        minimum=0.0,
        maximum=1.0,
        value=0.9,
        step=0.05,
    )

    # ChatInterface invokes fn(message, history, *additional_inputs),
    # which matches generate_response's signature exactly — the previous
    # pass-through lambda was redundant indirection, so the handler is
    # passed directly.
    chat_interface = gr.ChatInterface(
        fn=generate_response,
        additional_inputs=[system_prompt, temperature, max_tokens, top_p],
        title="Chat with MobileLLM-Pro",
        description="Adjust parameters and chat with the model.",
    )

if __name__ == "__main__":
    demo.launch()