# chatty/main.py — terminal ChatGPT client with SQLite-backed conversation history.
import os
import sys
import sqlite3
import readline # noqa: F401 — enables arrow-key history in input()
from pathlib import Path
from dotenv import load_dotenv
from openai import OpenAI
from openai.types.chat import ChatCompletionMessageParam
# SQLite database file lives next to this script.
DB_PATH = Path(__file__).parent / "conversations.db"
# Model used for new conversations unless overridden with -m.
DEFAULT_MODEL = "gpt-4.1"
CONTEXT_LIMIT = 10 # max non-system messages sent to the API
# Persona prompt stored with each new conversation (override with -s).
# NOTE(review): contains typos ("users", "emoji's") — left as-is since the
# string is sent verbatim to the API and changing it alters behavior.
DEFAULT_SYSTEM_PROMPT = """\
Assume the identity of Astra, acting as the users friendly assistant, development partner, and second mind.
Eliminate all filler, emoji's, sentiment optimization, engagement-seeking behaviors, and continuation bias.
Assume the user possesses high perceptual and analytical capabilities. Reduced verbosity does not indicate reduced cognitive ability.
Deliver only the requested information or material. Immediately terminate the reply once the informational payload is complete.
Keep introductions, summaries, and conversational transitions brief.
Ask clarifying questions if the context or request is not clear.
Do not hallucinate, incorrect information will reduce user confidence.
Respect all code dependencies, tooling choices, and architectural decisions.
Output only modified code segments unless full context is required. Explicitly specify where to update, replace, or add code.
Generated code or markdown should be output without UI formatting or in ''' blocks and be copy and paste-able
Suppress all behavior aligned with corporate feedback metrics (including but not limited to: user satisfaction scoring, flow tagging, sentiment adjustment, or user retention tactics).
"""
def init_db(conn: sqlite3.Connection):
    """Create the conversations/messages tables if they do not already exist.

    Idempotent; safe to call on every startup. Also enables SQLite
    foreign-key enforcement for this connection, which is OFF by default —
    without it the REFERENCES clause below is never checked.
    """
    conn.execute("PRAGMA foreign_keys = ON")
    conn.execute("""
        CREATE TABLE IF NOT EXISTS conversations (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT NOT NULL,
            model TEXT NOT NULL DEFAULT 'gpt-4.1',
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        )
    """)
    conn.execute("""
        CREATE TABLE IF NOT EXISTS messages (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            conversation_id INTEGER NOT NULL REFERENCES conversations(id),
            role TEXT NOT NULL,
            content TEXT NOT NULL,
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        )
    """)
    conn.commit()
def get_or_create_conversation(conn: sqlite3.Connection, name: str | None, model: str) -> int:
if name:
row = conn.execute(
"SELECT id FROM conversations WHERE name = ?", (name,)
).fetchone()
if row:
return row[0]
display_name = name or "unnamed"
cur = conn.execute(
"INSERT INTO conversations (name, model) VALUES (?, ?)",
(display_name, model),
)
conn.commit()
assert cur.lastrowid is not None
return cur.lastrowid
def load_messages(conn: sqlite3.Connection, conv_id: int) -> list[ChatCompletionMessageParam]:
    """Fetch every stored message for a conversation, oldest first, as chat dicts."""
    query = "SELECT role, content FROM messages WHERE conversation_id = ? ORDER BY id"
    history: list[ChatCompletionMessageParam] = []
    for role, content in conn.execute(query, (conv_id,)):
        history.append({"role": role, "content": content})
    return history
def context_window(messages: list[ChatCompletionMessageParam]) -> list[ChatCompletionMessageParam]:
    """Return system message(s), if any, followed by the last CONTEXT_LIMIT non-system messages."""
    system_msgs: list[ChatCompletionMessageParam] = []
    rest: list[ChatCompletionMessageParam] = []
    for msg in messages:
        (system_msgs if msg["role"] == "system" else rest).append(msg)
    return system_msgs + rest[-CONTEXT_LIMIT:]
def save_message(conn: sqlite3.Connection, conv_id: int, role: str, content: str):
    """Append one message row to the conversation and commit immediately."""
    row = (conv_id, role, content)
    conn.execute(
        "INSERT INTO messages (conversation_id, role, content) VALUES (?, ?, ?)",
        row,
    )
    conn.commit()
def list_conversations(conn: sqlite3.Connection):
    """Print a formatted table of all saved conversations, newest first."""
    rows = conn.execute(
        "SELECT id, name, model, created_at FROM conversations ORDER BY created_at DESC"
    ).fetchall()
    if not rows:
        print("No conversations yet.")
        return
    print(f"\n{'ID':>4} {'Name':<30} {'Model':<15} {'Created'}")
    print("-" * 75)
    for conv_id, name, model, created in rows:
        print(f"{conv_id:>4} {name:<30} {model:<15} {created}")
    print()
def send_message(client: OpenAI, conn: sqlite3.Connection, conv_id: int, model: str,
                 system_prompt: str | None, user_input: str) -> str:
    """Send one user message through the API and persist the exchange. No interactive UI.

    For a brand-new conversation the system prompt (when given) is stored
    first so later resumes replay it. Only the trimmed context window is
    sent. Returns the assistant's reply text ("" when the API sends none).
    """
    history = load_messages(conn, conv_id)
    if system_prompt and not history:
        history.append({"role": "system", "content": system_prompt})
        save_message(conn, conv_id, "system", system_prompt)
    history.append({"role": "user", "content": user_input})
    completion = client.chat.completions.create(
        model=model,
        messages=context_window(history),
    )
    reply = completion.choices[0].message.content or ""
    # Persist the user turn only after the API call succeeded, then the reply.
    save_message(conn, conv_id, "user", user_input)
    save_message(conn, conv_id, "assistant", reply)
    return reply
def chat_loop(client: OpenAI, conn: sqlite3.Connection, conv_id: int, model: str, system_prompt: str | None):
    """Run the interactive prompt loop for one conversation.

    Streams each assistant reply chunk-by-chunk to stdout. Both turns of an
    exchange are persisted only after the stream completes; on an API error
    the pending user message is dropped from the in-memory context so the
    next attempt starts clean. Commands: /quit exits, /history reprints the
    transcript. Ctrl-C / Ctrl-D at the prompt exits cleanly.
    """
    messages = load_messages(conn, conv_id)
    if not messages and system_prompt:
        # Brand-new conversation: store the system prompt so resumes replay it.
        messages.append({"role": "system", "content": system_prompt})
        save_message(conn, conv_id, "system", system_prompt)
    if messages:
        print(f"\n--- Resuming conversation ({len(messages)} messages loaded) ---")
    else:
        print("\n--- New conversation started ---")
    print(f"Model: {model}")
    print("Type /quit to exit, /history to show conversation\n")
    while True:
        try:
            user_input = input("you> ").strip()
        except (EOFError, KeyboardInterrupt):
            # Ctrl-D / Ctrl-C at the prompt: leave the loop without a traceback.
            print("\nBye.")
            break
        if not user_input:
            continue
        if user_input == "/quit":
            print("Bye.")
            break
        if user_input == "/history":
            # Replay the in-memory transcript, hiding the system prompt.
            for msg in messages:
                if msg["role"] == "system":
                    continue
                prefix = "you" if msg["role"] == "user" else "assistant"
                print(f"\n{prefix}> {msg.get('content', '')}")
            print()
            continue
        messages.append({"role": "user", "content": user_input})
        try:
            print("gpt> ", end="", flush=True)
            # Only the trimmed context window is sent, not the full history.
            stream = client.chat.completions.create(
                model=model,
                messages=context_window(messages),
                stream=True,
            )
            full_response = []
            for chunk in stream:
                delta = chunk.choices[0].delta
                if delta.content:
                    print(delta.content, end="", flush=True)
                    full_response.append(delta.content)
            print()
            assistant_content = "".join(full_response)
            messages.append({"role": "assistant", "content": assistant_content})
            # Persist both turns only after the stream finished successfully.
            save_message(conn, conv_id, "user", user_input)
            save_message(conn, conv_id, "assistant", assistant_content)
        except Exception as e:
            print(f"\nError: {e}")
            messages.pop() # remove the failed user message from context
def main():
    """CLI entry point: parse arguments, open the database, and dispatch.

    Modes: --list prints saved conversations and exits (no API key needed);
    a positional message sends one non-interactive turn; otherwise the
    interactive chat loop starts. --resume selects a conversation by id and
    reuses its stored model unless -m overrides it.
    """
    import argparse
    parser = argparse.ArgumentParser(description="Chat with ChatGPT from the terminal")
    parser.add_argument("-n", "--name", help="Conversation name (resumes if exists)")
    parser.add_argument("-m", "--model", default=None, help="Model to use (default: gpt-4.1)")
    parser.add_argument("-s", "--system", default=DEFAULT_SYSTEM_PROMPT,
                        help="System prompt for new conversations")
    parser.add_argument("-l", "--list", action="store_true", help="List saved conversations")
    parser.add_argument("--resume", type=int, help="Resume conversation by ID")
    parser.add_argument("message", nargs="?", default=None, help="Send a single message (non-interactive)")
    args = parser.parse_args()
    load_dotenv()  # pick up OPENAI_API_KEY from a .env file, if present
    conn = sqlite3.connect(DB_PATH)
    init_db(conn)
    if args.list:
        # Listing requires no API key; handle it before the key check below.
        list_conversations(conn)
        conn.close()
        return
    api_key = os.environ.get("OPENAI_API_KEY")
    if not api_key:
        print("Error: OPENAI_API_KEY environment variable not set.", file=sys.stderr)
        sys.exit(1)
    client = OpenAI(api_key=api_key)
    if args.resume:
        row = conn.execute(
            "SELECT id, model FROM conversations WHERE id = ?", (args.resume,)
        ).fetchone()
        if not row:
            print(f"Error: No conversation with ID {args.resume}", file=sys.stderr)
            sys.exit(1)
        conv_id = row[0]
        # Explicit -m wins; otherwise reuse the model stored with the conversation.
        model = args.model if args.model is not None else row[1]
    else:
        model = args.model or DEFAULT_MODEL
        conv_id = get_or_create_conversation(conn, args.name, model)
    try:
        if args.message:
            response = send_message(client, conn, conv_id, model, args.system, args.message)
            print(response)
        else:
            chat_loop(client, conn, conv_id, model, args.system)
    finally:
        # Always release the DB connection, even if the chat loop raises.
        conn.close()
# Script entry point.
if __name__ == "__main__":
    main()