# searchapi/app.py — Hugging Face Space source (uploaded by Arkm20, commit 47c9406, verified).
# app.py
from fastapi import FastAPI, Request
from pydantic import BaseModel
from duckduckgo_search import DDGS
import requests
from bs4 import BeautifulSoup
from together import Together
import os

# SECURITY: this Together API key is hardcoded and committed to source, so it
# must be considered leaked — revoke it and supply TOGETHER_API_KEY through the
# deployment environment (e.g. a Space secret) instead.
# Fix: use setdefault so an externally configured key is no longer clobbered;
# the literal remains only as a backward-compatible fallback.
os.environ.setdefault("TOGETHER_API_KEY", "tgp_v1_fviFjIVOiQFgzx40nMG-q_8mrWpocP8VzWBZMLH1FbA")

app = FastAPI()
client = Together()  # Together() reads TOGETHER_API_KEY from the environment
class Query(BaseModel):
    """Request body schema for ``POST /api/search``."""

    # The user's free-text search question.
    query: str
def get_web_context(prompt: str) -> str:
    """Search DuckDuckGo for *prompt* and return scraped page text.

    Collects up to three result URLs, fetches each page, extracts its
    visible text with BeautifulSoup, and concatenates the first 2000
    characters per page into one context string with a ``From <url>:``
    header per section. Fetch failures are recorded inline rather than
    raised, so one dead link does not abort the whole context.

    Fix: the return annotation said ``dict`` but the function has always
    returned a ``str``. Also builds the result via ``join`` instead of
    repeated string concatenation.
    """
    urls = []
    with DDGS() as ddgs:
        for result in ddgs.text(prompt, max_results=3):
            if result.get("href"):
                urls.append(result["href"])
            # Defensive cap; max_results=3 should already bound this.
            if len(urls) == 3:
                break

    headers = {"User-Agent": "Mozilla/5.0"}
    sections = []
    for url in urls:
        try:
            res = requests.get(url, headers=headers, timeout=10)
            soup = BeautifulSoup(res.content, "html.parser")
            text = soup.get_text(separator=' ', strip=True)
            sections.append(f"\nFrom {url}:\n{text[:2000]}\n")
        except Exception as e:
            # Best-effort scraping: note the failure in the context itself.
            sections.append(f"\nFrom {url}:\nError retrieving content: {e}\n")
    return "".join(sections).strip()
@app.post("/api/search")
async def search(q: Query):
    """Answer ``q.query`` with the LLM, grounded in scraped web sources.

    Returns ``{"answer": <text>}``; on any upstream failure the error is
    reported in the same JSON shape rather than as a 500.

    Fix: the system prompt promised "the following web sources" but no
    sources were ever fetched or included — the handler now calls
    ``get_web_context`` and appends the scraped context to the system
    message.

    NOTE(review): this name is shadowed by the module-level ``search``
    helper defined later in the file; the route itself is unaffected
    because FastAPI binds the handler at decoration time, but renaming
    one of them would be clearer.
    """
    context = get_web_context(q.query)
    messages = [
        {
            "role": "system",
            "content": (
                "You are a helpful search assistant that reads from the "
                "following web sources to answer user questions.\n" + context
            ),
        },
        {"role": "user", "content": q.query},
    ]
    try:
        response = client.chat.completions.create(
            model="meta-llama/Llama-3.3-70B-Instruct-Turbo-Free",
            messages=messages,
        )
        return {"answer": response.choices[0].message.content}
    except Exception as e:
        # Surface the failure in the response body rather than a raw 500.
        return {"answer": f"Error: {str(e)}"}
def search(prompt):
    """Look up *prompt* on DuckDuckGo and scrape the top results.

    Returns a dict mapping each result URL to the first 5000 characters
    of its visible page text, or to an error message when the fetch
    fails (one bad link never aborts the rest).

    NOTE(review): this module-level name shadows the ``/api/search``
    handler defined above; that route still works because FastAPI bound
    it at decoration time, but the duplicate name is confusing.
    """
    urls = []
    with DDGS() as ddgs:
        for hit in ddgs.text(prompt, max_results=3):
            href = hit.get("href")
            if href:
                urls.append(href)
            if len(urls) == 3:
                break

    pages = {}
    request_headers = {"User-Agent": "Mozilla/5.0"}
    for url in urls:
        try:
            response = requests.get(url, headers=request_headers, timeout=10)
            soup = BeautifulSoup(response.content, "html.parser")
            page_text = soup.get_text(separator=' ', strip=True)
            pages[url] = page_text[:5000]
        except Exception as e:
            pages[url] = f"Error retrieving content: {e}"
    return pages
@app.get("/ask")
def ask(request: Request):
    """Answer the ``prompt`` query parameter, grounded in scraped sources.

    Scrapes the top DuckDuckGo results via the module-level ``search``
    helper, prepends them to the user's question, and asks the LLM.
    Returns ``{"answer": <text>}`` or ``{"error": ...}`` when no prompt
    was given.

    Fix: the Together call was unguarded, so any upstream failure
    produced a raw 500 response — errors are now reported in the JSON
    body, consistent with the ``/api/search`` handler's error handling.
    """
    query_param = request.query_params.get("prompt")
    if not query_param:
        return {"error": "No prompt provided"}
    sources = search(query_param)
    content_blocks = "\n\n".join(
        f"Source: {url}\n{text}" for url, text in sources.items()
    )
    combined_prompt = f"{content_blocks}\n\nUser: {query_param}"
    try:
        response = client.chat.completions.create(
            model="meta-llama/Llama-3.3-70B-Instruct-Turbo-Free",
            messages=[{"role": "user", "content": combined_prompt}],
        )
        return {"answer": response.choices[0].message.content}
    except Exception as e:
        # Mirror /api/search: report the failure in the response body.
        return {"answer": f"Error: {str(e)}"}