# app.py
from fastapi import FastAPI
from pydantic import BaseModel
from duckduckgo_search import DDGS
import requests
from bs4 import BeautifulSoup
from together import Together
import os

# Your Together API key. Replace the placeholder with your own key, or set
# TOGETHER_API_KEY in your environment before starting the app (never commit a real key).
os.environ.setdefault("TOGETHER_API_KEY", "YOUR_TOGETHER_API_KEY")

app = FastAPI()
client = Together()


class Query(BaseModel):
    query: str


def get_web_context(prompt: str) -> str:
    """Search DuckDuckGo for the prompt and return scraped text from the top results."""
    results = []
    with DDGS() as ddgs:
        for r in ddgs.text(prompt, max_results=3):
            if r.get("href"):
                results.append(r["href"])
            if len(results) == 3:
                break

    context = ""
    headers = {"User-Agent": "Mozilla/5.0"}
    for url in results:
        try:
            res = requests.get(url, headers=headers, timeout=10)
            soup = BeautifulSoup(res.content, "html.parser")
            text = soup.get_text(separator=" ", strip=True)
            # Keep only the first 2000 characters per page to stay within the model's context window.
            context += f"\nFrom {url}:\n{text[:2000]}\n"
        except Exception as e:
            context += f"\nFrom {url}:\nError retrieving content: {e}\n"

    return context.strip()


@app.post("/api/search")
def search(q: Query):
    # Plain (non-async) endpoint: FastAPI runs it in a threadpool, so the blocking
    # requests / DDGS / Together calls do not stall the event loop.
    web_data = get_web_context(q.query)
    messages = [
        {
            "role": "system",
            "content": "You are a helpful search assistant that reads from the following web sources to answer user questions.",
        },
        {"role": "user", "content": f"{q.query}\n\nHere is some context:\n{web_data}"},
    ]
    try:
        response = client.chat.completions.create(
            model="meta-llama/Llama-3.3-70B-Instruct-Turbo-Free",
            messages=messages,
        )
        return {"answer": response.choices[0].message.content}
    except Exception as e:
        return {"answer": f"Error: {str(e)}"}
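
# --- Usage sketch (not part of the app; assumes uvicorn is installed and the
# --- server runs on uvicorn's default port 8000) ---
# Start the server:
#   uvicorn app:app --reload
# Query the endpoint with an example request body (the query text is illustrative):
#   curl -X POST http://127.0.0.1:8000/api/search \
#        -H "Content-Type: application/json" \
#        -d '{"query": "What is FastAPI?"}'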