krim798
committed on
Made a few agent tools, hopefully passes
Browse files- .gitignore +1 -0
- .python-version +1 -0
- app.py +109 -3
- main.py +6 -0
- pyproject.toml +17 -0
- uv.lock +0 -0
.gitignore
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
.env
|
.python-version
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
3.11
|
app.py
CHANGED
|
@@ -3,21 +3,127 @@ import gradio as gr
|
|
| 3 |
import requests
|
| 4 |
import inspect
|
| 5 |
import pandas as pd
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 6 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 7 |
# (Keep Constants as is)
|
| 8 |
# --- Constants ---
|
| 9 |
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
|
| 10 |
|
| 11 |
# --- Basic Agent Definition ---
|
| 12 |
# ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
|
|
|
|
|
|
|
| 13 |
class BasicAgent:
|
| 14 |
def __init__(self):
|
| 15 |
print("BasicAgent initialized.")
|
|
|
|
| 16 |
def __call__(self, question: str) -> str:
|
| 17 |
print(f"Agent received question (first 50 chars): {question[:50]}...")
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 21 |
|
| 22 |
def run_and_submit_all( profile: gr.OAuthProfile | None):
|
| 23 |
"""
|
|
|
|
| 3 |
import requests
|
| 4 |
import inspect
|
| 5 |
import pandas as pd
|
| 6 |
+
from firecrawl import FirecrawlApp
|
| 7 |
+
from langchain_community.document_loaders.firecrawl import FireCrawlLoader
|
| 8 |
+
from langchain_core.tools import tool
|
| 9 |
+
from dotenv import load_dotenv
|
| 10 |
+
import wikipedia
|
| 11 |
+
from datetime import datetime
|
| 12 |
|
| 13 |
+
@tool
def current_datetime(_: str = "") -> str:
    """
    Returns the current date and time.
    """
    # Format the current local timestamp via an f-string format spec
    # (equivalent to strftime("%Y-%m-%d %H:%M:%S")).
    now = datetime.now()
    return f"{now:%Y-%m-%d %H:%M:%S}"
|
| 19 |
+
@tool
def calculator(expression: str) -> str:
    """
    Evaluates a mathematical expression and returns the result.
    """
    # SECURITY: the previous implementation used
    # eval(expression, {"__builtins__": {}}), which is NOT a sandbox —
    # attribute access on literals (e.g. ().__class__...) can escape it.
    # This version walks the AST and only permits numeric literals and
    # whitelisted arithmetic operators.
    import ast
    import operator

    # Whitelisted operators; anything outside this table is rejected.
    _OPS = {
        ast.Add: operator.add,
        ast.Sub: operator.sub,
        ast.Mult: operator.mul,
        ast.Div: operator.truediv,
        ast.FloorDiv: operator.floordiv,
        ast.Mod: operator.mod,
        ast.Pow: operator.pow,
        ast.USub: operator.neg,
        ast.UAdd: operator.pos,
    }

    def _eval(node):
        # Recursively evaluate a vetted arithmetic AST node.
        if isinstance(node, ast.Expression):
            return _eval(node.body)
        if isinstance(node, ast.Constant) and isinstance(node.value, (int, float)):
            return node.value
        if isinstance(node, ast.BinOp) and type(node.op) in _OPS:
            return _OPS[type(node.op)](_eval(node.left), _eval(node.right))
        if isinstance(node, ast.UnaryOp) and type(node.op) in _OPS:
            return _OPS[type(node.op)](_eval(node.operand))
        raise ValueError("unsupported expression element")

    try:
        result = _eval(ast.parse(expression, mode="eval"))
        return str(result)
    except Exception as e:
        # Preserve the original error-string contract instead of raising.
        return f"Calculation error: {e}"
|
| 29 |
+
@tool
def wikipedia_search(query: str) -> str:
    """
    Searches Wikipedia for the given query and returns the summary of the top result.
    """
    # Delegate to the wikipedia package; any failure (disambiguation, no page,
    # network error) is surfaced to the agent as a readable string.
    try:
        return wikipedia.summary(query, sentences=2)
    except Exception as e:
        return f"Wikipedia search failed: {e}"
|
| 39 |
+
import os  # FIX: os.environ is used below but `os` was never imported anywhere in the file

# Read secrets (FIRECRAWL_API_KEY) from a local .env file into the environment.
load_dotenv()

# NOTE(review): this module-level loader appears unused — scraper() builds its
# own per-URL loader — and it is constructed at import time against a
# hard-coded URL. Consider removing it; kept here to avoid changing the
# module's public names.
loader = FireCrawlLoader(
    api_key=os.environ.get('FIRECRAWL_API_KEY'), url='https://firecrawl.dev', mode='scrape')

# Firecrawl credential shared by the scraper and web_search tools.
api_key = os.environ.get('FIRECRAWL_API_KEY')
|
| 43 |
+
@tool
def scraper(url:str)->str:
    """
    Uses Firecrawl to scrape the content of the given url.
    """
    # Build a fresh loader per call so each URL gets its own scrape request.
    docs = FireCrawlLoader(api_key=api_key, url=url, mode='scrape').load()
    if not docs:
        return "No content found or failed to scrape."
    # Cap the returned content at 1000 characters to keep prompts small.
    return docs[0].page_content[:1000]
|
| 57 |
+
@tool
def web_search(query:str)->str:
    """
    Uses firecrawl to search for the given query and returns the top result's snippet.
    """
    client = FirecrawlApp(api_key=api_key)
    # NOTE(review): assumes search() returns a dict with a "results" list —
    # newer firecrawl-py versions return a response object; verify against the
    # installed version.
    response = client.search(query)
    hits = response.get("results") if response else None
    if not hits:
        return "No results found"
    top = hits[0]
    title = top.get('title', '')
    snippet = top.get('snippet', '')
    link = top.get('url', '')
    return f"{title}:{snippet}({link})"
|
| 68 |
# (Keep Constants as is)
|
| 69 |
# --- Constants ---
|
| 70 |
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
|
| 71 |
|
| 72 |
# --- Basic Agent Definition ---
|
| 73 |
# ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
|
| 74 |
+
|
| 75 |
+
|
| 76 |
class BasicAgent:
    """Keyword-routing agent: dispatches a question to the calculator,
    datetime, Wikipedia, or web-search/scraper tool defined above."""

    def __init__(self):
        print("BasicAgent initialized.")

    def __call__(self, question: str) -> str:
        print(f"Agent received question (first 50 chars): {question[:50]}...")

        # Lowercase once; every routing check below reuses it.
        q = question.lower()

        # 1. Calculator logic
        # NOTE(review): bare "+", "-", "/" match ordinary prose and URLs, so
        # this route fires on many non-math questions; the except falls
        # through to later routes when the tool itself raises.
        calc_keywords = ["calculate", "compute", "evaluate", "+", "-", "*", "/", "^", "sqrt", "log", "sum", "product"]
        if any(kw in q for kw in calc_keywords):
            try:
                # FIX: @tool-decorated objects must be called via .invoke();
                # direct __call__ is deprecated/removed in langchain-core.
                return calculator.invoke(question)
            except Exception as e:
                print(f"Calculator tool failed: {e}")

        # 2. Date/time logic
        datetime_keywords = ["date", "time", "day", "month", "year", "current time", "current date"]
        if any(kw in q for kw in datetime_keywords):
            try:
                # FIX: the original zero-arg direct call current_datetime()
                # always failed (tools require a tool input); invoke with the
                # dummy string argument the tool declares.
                return current_datetime.invoke("")
            except Exception as e:
                print(f"Datetime tool failed: {e}")

        # 3. Wikipedia logic
        if "wikipedia" in q or "wiki" in q:
            try:
                # Strip the routing keywords so the actual topic is searched.
                cleaned = q.replace("wikipedia", "").replace("wiki", "").strip()
                return wikipedia_search.invoke(cleaned if cleaned else question)
            except Exception as e:
                print(f"Wikipedia tool failed: {e}")

        # 4. Web search + scrape logic (fallback route)
        try:
            search_result = web_search.invoke(question)
            # web_search formats the hit as "title:snippet(url)"; pull the URL
            # back out so we can scrape the page for a richer answer.
            import re
            url_match = re.search(r"\((https?://[^\s)]+)\)", search_result)
            if url_match:
                scraped = scraper.invoke(url_match.group(1))
                return f"{search_result}\n\nScraped content:\n{scraped}"
            return search_result
        except Exception as e:
            print(f"Web search/scraper tool failed: {e}")
        return "Sorry, I couldn't find an answer."
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
|
| 127 |
|
| 128 |
def run_and_submit_all( profile: gr.OAuthProfile | None):
|
| 129 |
"""
|
main.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
def main():
    """Entry point for the template project: print a greeting."""
    message = "Hello from final-assignment-template!"
    print(message)


if __name__ == "__main__":
    main()
|
pyproject.toml
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[project]
|
| 2 |
+
name = "final-assignment-template"
|
| 3 |
+
version = "0.1.0"
|
| 4 |
+
description = "Add your description here"
|
| 5 |
+
readme = "README.md"
|
| 6 |
+
requires-python = ">=3.11"
|
| 7 |
+
dependencies = [
|
| 8 |
+
"dotenv>=0.9.9",
|
| 9 |
+
"firecrawl-py>=2.12.0",
|
| 10 |
+
"gradio>=5.35.0",
|
| 11 |
+
"huggingface-hub>=0.33.1",
|
| 12 |
+
"langchain>=0.3.26",
|
| 13 |
+
"langchain-community>=0.3.26",
|
| 14 |
+
"requests>=2.32.4",
|
| 15 |
+
"ruff>=0.12.1",
|
| 16 |
+
"wikipedia>=1.4.0",
|
| 17 |
+
]
|
uv.lock
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|