import gradio as gr

from transformers import AutoTokenizer, AutoModelForCausalLM, Conversation, pipeline

# Pre-trained conversational models available on the Hugging Face Hub.
# Bard, Turing NLG, and GPT-3 are proprietary and have no Hub checkpoints, so open
# models are used instead. These are large downloads; smaller checkpoints can be
# substituted if memory is limited.
model_names = [
    "EleutherAI/gpt-neo-2.7B",
    "EleutherAI/gpt-j-6B",
    "microsoft/DialoGPT-large",
]

# Create one conversational pipeline per model, loading each tokenizer/model pair once.
# Note: the "conversational" pipeline task is only available in older transformers
# releases; it has been removed from recent versions.
conversational_pipelines = []
for name in model_names:
    tokenizer = AutoTokenizer.from_pretrained(name)
    model = AutoModelForCausalLM.from_pretrained(name)
    conversational_pipelines.append(
        (name, pipeline("conversational", model=model, tokenizer=tokenizer))
    )

# Build the reverse-prompt-engineering request for a given code snippet
def build_prompt(code):
    return (
        "Now I want you to reverse prompt engineer the following code:\n"
        f"{code}\n"
        "Give me a single prompt that would create a similar output."
    )

# Define a function to handle conversation with multiple models
def handle_conversation(prompt):
    responses = []
    for name, conv_pipeline in conversational_pipelines:
        conversation = conv_pipeline(Conversation(prompt))
        responses.append(f"{name}: {conversation.generated_responses[-1]}")
    return "\n\n".join(responses)

# Replace the following line with the user's input code snippet
user_code = """
def reverse_prompt_engineer(code):
    # TODO: Reverse prompt engineer the code
    return None
"""

# Use the handle_conversation function to get responses from multiple models
responses = handle_conversation(build_prompt(user_code))
print(responses)

# Instruct the user how to use the tool
print(
    "To use this tool, paste your code snippet into the `user_code` variable and run the script, "
    "or enter the snippet in the Gradio interface launched below. The tool will then generate a "
    "prompt that can be used to create similar code."
)

# Create the interface
app = gr.Interface(
    fn=lambda code: handle_conversation(build_prompt(code)),
    inputs="text",
    outputs="text",
    title="Reverse Prompt Engineer",
    description="Generate a prompt that can be used to create similar code.",
)

app.launch()
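
# A minimal usage sketch: once the app is launched, the interface can also be queried
# programmatically with the gradio_client package. The local URL and the "/predict"
# endpoint name below assume Gradio's defaults for a gr.Interface.
#
#   from gradio_client import Client
#
#   client = Client("http://127.0.0.1:7860")
#   result = client.predict("def add(a, b):\n    return a + b", api_name="/predict")
#   print(result)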