# NOTE: Snapshot of a Hugging Face Space; the Space reported "Runtime error"
# at the time of capture.
"""Gradio chat UI for MobileLLM-Pro.

Builds a Blocks app exposing generation controls (system prompt,
temperature, max new tokens, top-p) and a ChatInterface that forwards
them to ``models.generate_response``.
"""
import gradio as gr

from models import generate_response
from config import MODEL_NAME  # noqa: F401 — unused in this file; presumably used elsewhere, TODO confirm

with gr.Blocks(title="MobileLLM-Pro Chat") as demo:
    gr.Markdown("# MobileLLM-Pro Chat Interface")
    gr.Markdown("Built with [anycoder](https://huggingface.co/spaces/akhaliq/anycoder)")

    # Generation controls; passed to generate_response via additional_inputs.
    system_prompt = gr.Textbox(
        label="System Prompt",
        value="You are a helpful AI assistant.",
        lines=2,
    )
    temperature = gr.Slider(
        label="Temperature",
        minimum=0.0,
        maximum=2.0,
        value=0.7,
        step=0.1,
    )
    max_tokens = gr.Slider(
        label="Max New Tokens",
        minimum=10,
        maximum=1000,
        value=256,
        step=10,
    )
    top_p = gr.Slider(
        label="Top P",
        minimum=0.0,
        maximum=1.0,
        value=0.9,
        step=0.05,
    )

    chat_interface = gr.ChatInterface(
        # generate_response already takes (message, history, system_prompt,
        # temperature, max_tokens, top_p) in this exact order, so the
        # original pass-through lambda added nothing — pass it directly.
        fn=generate_response,
        additional_inputs=[system_prompt, temperature, max_tokens, top_p],
        title="Chat with MobileLLM-Pro",
        description="Adjust parameters and chat with the model.",
    )

if __name__ == "__main__":
    demo.launch()