File size: 2,966 Bytes
b906e49
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
47c9406
b906e49
 
 
 
 
 
 
 
 
d4ab09c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
# app.py
from fastapi import FastAPI, Request
from pydantic import BaseModel
from duckduckgo_search import DDGS
import requests
from bs4 import BeautifulSoup
from together import Together
import os

# SECURITY FIX: the Together API key was hard-coded here and committed to
# source control, leaking the credential to anyone with repo access. The
# exposed key must be rotated. Supply the (new) key via the environment:
#   TOGETHER_API_KEY=... uvicorn app:app
if not os.environ.get("TOGETHER_API_KEY"):
    raise RuntimeError("Set the TOGETHER_API_KEY environment variable")

app = FastAPI()
client = Together()  # Together() reads TOGETHER_API_KEY from the environment

class Query(BaseModel):
    """Request body for POST /api/search: the user's free-text query."""

    query: str

def get_web_context(prompt: str) -> str:
    """Search DuckDuckGo for *prompt* and return concatenated page text.

    Fetches up to the first three result URLs and appends up to 2,000
    characters of visible text from each, labelled per URL. Fetch
    failures are recorded inline rather than raised (best effort).

    Bug fix: the return annotation previously said ``dict``, but the
    function has always built and returned a string.
    """
    urls = []
    with DDGS() as ddgs:
        # max_results=3 already caps the generator, so no manual break
        # is needed; just keep entries that actually carry an href.
        for result in ddgs.text(prompt, max_results=3):
            href = result.get("href")
            if href:
                urls.append(href)

    parts = []
    headers = {"User-Agent": "Mozilla/5.0"}
    for url in urls:
        try:
            res = requests.get(url, headers=headers, timeout=10)
            soup = BeautifulSoup(res.content, "html.parser")
            text = soup.get_text(separator=' ', strip=True)
            parts.append(f"\nFrom {url}:\n{text[:2000]}\n")
        except Exception as e:
            # A dead link should not abort the whole query.
            parts.append(f"\nFrom {url}:\nError retrieving content: {e}\n")
    return "".join(parts).strip()

@app.post("/api/search")
async def search(q: Query):
    """Answer *q.query* with the LLM, grounded on scraped web sources.

    Returns {"answer": <text>}; upstream failures are reported in-band
    as {"answer": "Error: ..."} so the endpoint never raises.
    """
    # Bug fix: the system prompt promised "the following web sources",
    # but none were ever fetched or supplied — get_web_context existed
    # unused. Fetch the context and append it to the system message.
    context = get_web_context(q.query)
    messages = [
        {
            "role": "system",
            "content": (
                "You are a helpful search assistant that reads from the "
                "following web sources to answer user questions.\n"
                + context
            ),
        },
        {"role": "user", "content": f"{q.query}"},
    ]
    try:
        response = client.chat.completions.create(
            model="meta-llama/Llama-3.3-70B-Instruct-Turbo-Free",
            messages=messages
        )
        return {"answer": response.choices[0].message.content}
    except Exception as e:
        return {"answer": f"Error: {str(e)}"}


def search(prompt):
    """Fetch up to three DuckDuckGo results for *prompt*.

    Returns a dict mapping each result URL to at most 5,000 characters
    of its visible page text, or to an error message when the page
    could not be retrieved.

    NOTE(review): this module-level name shadows the /api/search route
    handler defined above. FastAPI keeps the already-registered route,
    so nothing breaks, but the collision is confusing — consider
    renaming this helper (and its caller in /ask) in one change.
    """
    links = []
    with DDGS() as ddgs:
        for hit in ddgs.text(prompt, max_results=3):
            link = hit.get("href")
            if link:
                links.append(link)
            if len(links) == 3:
                break

    page_text = {}
    ua_headers = {"User-Agent": "Mozilla/5.0"}
    for link in links:
        try:
            reply = requests.get(link, headers=ua_headers, timeout=10)
            parsed = BeautifulSoup(reply.content, "html.parser")
            page_text[link] = parsed.get_text(separator=' ', strip=True)[:5000]
        except Exception as err:
            page_text[link] = f"Error retrieving content: {err}"
    return page_text

@app.get("/ask")
def ask(request: Request):
    """Answer the ``prompt`` query parameter using the LLM + scraped sources.

    Returns {"answer": <text>} on success, {"error": ...} when no prompt
    is supplied, or {"answer": "Error: ..."} on upstream failure —
    consistent with the /api/search endpoint's error style.
    """
    query_param = request.query_params.get("prompt")
    if not query_param:
        return {"error": "No prompt provided"}

    sources = search(query_param)

    # Prepend each scraped page, labelled by URL, to ground the model.
    content_blocks = "\n\n".join(
        f"Source: {url}\n{text}" for url, text in sources.items()
    )
    combined_prompt = f"{content_blocks}\n\nUser: {query_param}"

    try:
        response = client.chat.completions.create(
            model="meta-llama/Llama-3.3-70B-Instruct-Turbo-Free",
            messages=[{"role": "user", "content": combined_prompt}]
        )
    except Exception as e:
        # Bug fix: previously an upstream failure surfaced as a raw 500;
        # report it in-band the way /api/search does.
        return {"answer": f"Error: {str(e)}"}

    return {"answer": response.choices[0].message.content}