# F1-Simple-Demo / app.py
# Author: likhonsheikh (commit 37783cb, "Update app.py", verified)
# NOTE(review): the lines above were Hugging Face Spaces page chrome pasted
# into the source; they are preserved here as comments so the file parses.
import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
import warnings
# NOTE(review): this blanket-suppresses ALL warnings (deprecations included);
# consider narrowing to specific categories, e.g. FutureWarning from transformers.
warnings.filterwarnings("ignore")
# ---------------------------------------------------------------------------
# Model initialisation
# ---------------------------------------------------------------------------
model_name = "sheikh/Sheikh-F1"
device = "cuda" if torch.cuda.is_available() else "cpu"

print("Loading F-1 model...")
try:
    # Keep ONLY the model download/load inside the try: UI-construction bugs
    # must not be misreported as "model loading" failures.
    tokenizer = AutoTokenizer.from_pretrained(model_name, padding_side="left")
    model = AutoModelForCausalLM.from_pretrained(model_name).to(device)
    # Many causal-LM tokenizers ship without a pad token; reuse EOS so that
    # padded encoding works.
    if tokenizer.pad_token is None:
        tokenizer.pad_token = tokenizer.eos_token
    print("F-1 model loaded successfully!")
    model_loaded = True
except Exception as e:  # top-level boundary: report and fall back to a stub UI
    print(f"Error: {e}")
    model_loaded = False


def generate_code(prompt, language="English", max_length=200):
    """Generate a model response for *prompt*.

    Parameters
    ----------
    prompt : str
        The user's coding request.
    language : str
        "English", "Bengali", or "Banglish"; selects the prompt prefix.
    max_length : int
        Maximum number of NEW tokens to sample (passed as ``max_new_tokens``;
        coerced to ``int`` because ``gr.Slider`` may deliver a float).

    Returns
    -------
    str
        Generated text with the prompt removed, or an ``"Error: ..."`` string.
    """
    try:
        # Language-specific prefix steers the model toward the right language.
        if language == "Bengali":
            prefix = "বাংলা স্ক্রিপ্ট: "
        elif language == "Banglish":
            prefix = "বাংলিশ: "
        else:
            prefix = "English: "
        full_prompt = prefix + prompt

        inputs = tokenizer(full_prompt, return_tensors="pt", padding=True, truncation=True)
        input_ids = inputs["input_ids"].to(device)
        attention_mask = inputs["attention_mask"].to(device)

        with torch.no_grad():
            outputs = model.generate(
                input_ids=input_ids,
                attention_mask=attention_mask,
                max_new_tokens=int(max_length),
                temperature=0.7,
                do_sample=True,
                pad_token_id=tokenizer.eos_token_id,
                eos_token_id=tokenizer.eos_token_id,
                repetition_penalty=1.1,
                top_p=0.9,
                top_k=50,
            )

        # Strip the prompt by TOKEN position rather than string-prefix
        # matching: decoding can normalise whitespace/unicode, making
        # ``response.startswith(full_prompt)`` unreliable.
        generated_tokens = outputs[0][input_ids.shape[1]:]
        return tokenizer.decode(generated_tokens, skip_special_tokens=True).strip()
    except Exception as e:  # UI boundary: surface the error as output text
        return f"Error: {str(e)}"


# ---------------------------------------------------------------------------
# Interface
# ---------------------------------------------------------------------------
if model_loaded:
    with gr.Blocks(title="F-1 Demo", theme=gr.themes.Soft()) as demo:
        gr.Markdown("# 🤖 F-1: Multilingual Coding Assistant\n## মাল্টিলিঙ্গুয়াল কোডিং সহায়ক\nCreated by Likhon Sheikh 🇧🇩")
        with gr.Row():
            language = gr.Dropdown(["English", "Bengali", "Banglish"], label="Language")
            max_length = gr.Slider(50, 500, 200, label="Response Length")
        prompt = gr.Textbox(label="Enter coding request", lines=3)
        output = gr.Textbox(label="Generated Code", lines=10)
        btn = gr.Button("Generate", variant="primary")
        btn.click(
            fn=generate_code,
            inputs=[prompt, language, max_length],
            outputs=[output],
        )
        gr.Markdown("""
### Features:
- ✅ Multilingual support (English, Bengali, Banglish)
- ✅ Code generation in Python
- ✅ Designed for Bangladeshi developers
**Made with ❤️ by Likhon Sheikh**
""")
else:
    # Fallback interface when the model could not be loaded.
    # (The original called gr.Markdown(...) at module level here, which is a
    # no-op outside a Blocks context and was removed.)
    demo = gr.Interface(
        fn=lambda x: "Model loading failed. Please check the base model microsoft/DialoGPT-medium.",
        inputs=gr.Textbox(label="Enter coding request"),
        outputs=gr.Textbox(label="Generated Code"),
        title="F-1 Demo (Fallback)",
    )

# Single entry point for both the real and fallback interfaces.
if __name__ == "__main__":
    demo.queue().launch()