from fastapi import FastAPI
from fastapi.responses import FileResponse
from transformers import pipeline
from fpdf import FPDF
import os
import uvicorn
from fastapi.middleware.cors import CORSMiddleware

app = FastAPI()

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],   # Or put your frontend URL later for security
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Hugging Face token (set this in your Space settings → Variables and secrets)
HF_TOKEN = os.getenv("HF_TOKEN")

# Load Gemma model
pipe = pipeline(
    "text-generation",
    model="google/gemma-3-270m-it",
    token=HF_TOKEN,
)

def generate_resume_with_gemma(data):
    prompt = f"""
    Create a professional resume based on the following details.
    Write in clear sections (Education, Internships, Projects, Skills, Extra Curricular).

    Personal Information:
    - Name: {data.get('name')}
    - Email: {data.get('email')}
    - Phone: {data.get('phone')}
    - LinkedIn: {data.get('linkedin')}

    Education:
    {data.get('education')}

    Internships:
    {data.get('internships')}

    Projects:
    {data.get('projects')}

    Skills:
    {data.get('skills')}

    Extra Curricular:
    {data.get('extra')}

    Resume:
    """
    # return_full_text=False strips the prompt so only the newly generated resume text is returned
    result = pipe(prompt, max_new_tokens=400, do_sample=False, return_full_text=False)
    return result[0]["generated_text"]

def generate_resume_pdf(text):
    pdf = FPDF()
    pdf.add_page()
    pdf.set_font("Arial", size=12)
    for line in text.split("\n"):
        pdf.multi_cell(0, 8, line)
    filename = "resume.pdf"
    pdf.output(filename)
    return filename

@app.post("/generate_resume")
async def generate_resume(data: dict):
    # Step 1: Ask Gemma to polish resume text
    text = generate_resume_with_gemma(data)
    # Step 2: Generate PDF
    pdf_file = generate_resume_pdf(text)
    # Step 3: Return PDF file
    return FileResponse(pdf_file, media_type="application/pdf", filename="resume.pdf")

# For local debugging (not needed in HF Spaces)
if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=7860)
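
# Illustrative client call (an assumption about how the endpoint would be used, not part
# of the app itself): POST the resume fields as JSON and save the returned PDF. Field
# values below are placeholders; the URL assumes a locally running instance on port 7860.
#
#     import requests
#
#     payload = {
#         "name": "Jane Doe",
#         "email": "jane@example.com",
#         "phone": "+1 555 0100",
#         "linkedin": "linkedin.com/in/janedoe",
#         "education": "B.Tech in Computer Science, 2024",
#         "internships": "Backend intern at ExampleCorp",
#         "projects": "Resume generator web app",
#         "skills": "Python, FastAPI",
#         "extra": "Coding club lead",
#     }
#     resp = requests.post("http://localhost:7860/generate_resume", json=payload)
#     with open("resume.pdf", "wb") as f:
#         f.write(resp.content)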