More refine
parent f56ffe480d
commit 31335cc0cb
textmagicscripts/generate_flow (new executable file, +29 lines)
@@ -0,0 +1,29 @@
#!/home/hollorol/miniconda3/envs/llm/bin/python

import subprocess as sp
from openai import OpenAI
import sys

# Read the API key from the pass password store.
API_KEY = sp.getoutput("pass show openai_apikey")
client = OpenAI(api_key=API_KEY)
temperature = 0.4


def generate(text_message):
    prompt = """
    You are an AI that generates text based on the prompt. You must use the user-provided text as a base; all text should be continuous and coherent.
    """

    response = client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "system", "content": prompt},
                  {"role": "user", "content": text_message}],
        temperature=temperature  # Adjust for creativity
    )

    response = response.choices[0].message.content
    print(response)


if __name__ == "__main__":
    text_message = sys.stdin.read().strip()
    generate(text_message)
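generate_flow reads the text to continue from stdin and prints the model's continuation to stdout, so it can be piped from any other tool. A minimal sketch of driving it from Python, assuming the script is executable at textmagicscripts/generate_flow and that `pass show openai_apikey` works in the calling environment (the wrapper itself is not part of this commit):

# Sketch: pipe a text fragment into generate_flow and capture the continuation.
import subprocess

seed_text = "The quick brown fox"
completed = subprocess.run(
    ["textmagicscripts/generate_flow"],
    input=seed_text,
    capture_output=True,
    text=True,
    check=True,
).stdout.strip()
print(completed)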
textmagicscripts/refine_eng (new executable file, +98 lines)
@@ -0,0 +1,98 @@
#!/home/hollorol/miniconda3/envs/llm/bin/python

import subprocess as sp
from openai import OpenAI
import tkinter as tk
import sys

# Read the API key from the pass password store.
API_KEY = sp.getoutput("pass show openai_apikey")
client = OpenAI(api_key=API_KEY)
temperature = 0.4


def get_prompt_parameters():
    """
    Opens a GUI for entering prompt parameters.
    Returns a tuple: (goal, target, modalities)
    The GUI is destroyed after submission.
    """
    def submit(event=None):
        """Retrieve values and close the GUI."""
        nonlocal result
        result = (goal_entry.get(), target_entry.get(), modalities_entry.get())
        root.destroy()

    result = ("", "", "")  # Fallback if the window is closed without submitting
    root = tk.Tk()
    root.title("Enter Prompt Parameters")
    # Force window to be always on top (like rofi)
    root.attributes('-topmost', True)

    # Set window type for DWM to recognize as floating
    root.attributes('-type', 'dialog')  # Works on Linux X11
    root.wm_attributes('-type', 'dialog')

    # Labels and input fields
    tk.Label(root, text="Goal:").grid(row=0, column=0, padx=5, pady=5, sticky="e")
    goal_entry = tk.Entry(root, width=40)
    goal_entry.grid(row=0, column=1, padx=5, pady=5)

    tk.Label(root, text="Target:").grid(row=1, column=0, padx=5, pady=5, sticky="e")
    target_entry = tk.Entry(root, width=40)
    target_entry.grid(row=1, column=1, padx=5, pady=5)

    tk.Label(root, text="Modalities:").grid(row=2, column=0, padx=5, pady=5, sticky="e")
    modalities_entry = tk.Entry(root, width=40)
    modalities_entry.grid(row=2, column=1, padx=5, pady=5)
    modalities_entry.bind("<Return>", submit)

    # Submit button
    submit_button = tk.Button(root, text="Submit", command=submit)
    submit_button.grid(row=3, column=0, columnspan=2, pady=10)

    def center_window():
        root.update_idletasks()  # Ensure window size is calculated
        screen_width = root.winfo_screenwidth()
        screen_height = root.winfo_screenheight()
        window_width = root.winfo_width()
        window_height = root.winfo_height()

        x = (screen_width - window_width) // 2
        y = (screen_height - window_height) // 2

        root.geometry(f"{window_width}x{window_height}+{x}+{y}")

    # Set initial focus to the first input field
    root.after(100, lambda: goal_entry.focus_set())

    # Wait a bit to center after window initializes
    root.after(100, center_window)

    root.mainloop()  # Open the GUI and block execution until closed
    return result  # Return collected values


def refine_text(text_message):
    # The third dialog field is labeled "Modalities" but is used as the tone parameter below.
    goal, target_audience, tone = get_prompt_parameters()
    prompt = f"""
    You are an AI that refines text according to structured input.
    Given the following parameters (if applicable):
    - **Goal**: {goal}
    - **Target**: {target_audience}
    - **Tone**: {tone}
    Provide only the refined version of the text. Do not include any explanations, introductions, or closing remarks; only return the improved text.
    """

    response = client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "system", "content": prompt},
                  {"role": "user", "content": text_message}],
        temperature=temperature  # Adjust for creativity
    )

    response = response.choices[0].message.content
    print(response)


if __name__ == "__main__":
    text_message = sys.stdin.read().strip()
    refine_text(text_message)
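refine_eng works the same way over stdin/stdout, but it first opens the Tk dialog for Goal, Target, and Modalities, so it needs a running X11 session. A hedged sketch of a clipboard round-trip in Python; the xclip calls and the refine-the-selection workflow are assumptions, not part of this commit:

# Sketch: refine the current X11 primary selection and place the result on the clipboard.
import subprocess

selection = subprocess.run(
    ["xclip", "-o", "-selection", "primary"],
    capture_output=True, text=True, check=True,
).stdout

refined = subprocess.run(
    ["textmagicscripts/refine_eng"],  # opens the Tk parameter dialog first
    input=selection,
    capture_output=True, text=True, check=True,
).stdout

subprocess.run(
    ["xclip", "-selection", "clipboard"],
    input=refined, text=True, check=True,
)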