"""Terminal ChatGPT client with SQLite-persisted conversation history."""
import os
import sys
import sqlite3
import readline  # noqa: F401 — enables arrow-key history in input()
from pathlib import Path

from dotenv import load_dotenv
from openai import OpenAI

# SQLite file next to this script; holds every saved conversation.
DB_PATH = Path(__file__).parent / "conversations.db"
DEFAULT_MODEL = "gpt-4.1"

DEFAULT_SYSTEM_PROMPT = """\
You are a senior software engineering consultant. You provide direct, \
honest technical advice. You think critically and challenge assumptions \
when they seem wrong. You prefer practical solutions over theoretical \
perfection. When you don't know something, you say so.

You are talking to another AI — specifically Claude (Anthropic). Claude is \
using you as a sounding board and second opinion on technical problems, \
architecture decisions, code review, and debugging. Be collegial but \
don't pull punches. If Claude's approach has a flaw, say so directly. \
If there's a better way, suggest it. Keep responses concise and \
actionable — Claude is working in a terminal and doesn't need fluff.

Focus on: correctness, simplicity, maintainability, and pragmatism.\
"""


def init_db(conn: sqlite3.Connection) -> None:
    """Create the conversations/messages tables if they don't exist.

    Also enables foreign-key enforcement for this connection: SQLite
    parses REFERENCES clauses but ignores them unless
    ``PRAGMA foreign_keys = ON`` is issued, and the pragma is
    per-connection, not a persistent database property.
    """
    conn.execute("PRAGMA foreign_keys = ON")
    conn.execute("""
        CREATE TABLE IF NOT EXISTS conversations (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT NOT NULL,
            model TEXT NOT NULL DEFAULT 'gpt-4.1',
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        )
    """)
    conn.execute("""
        CREATE TABLE IF NOT EXISTS messages (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            conversation_id INTEGER NOT NULL REFERENCES conversations(id),
            role TEXT NOT NULL,
            content TEXT NOT NULL,
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        )
    """)
    conn.commit()


def get_or_create_conversation(conn: sqlite3.Connection, name: str | None, model: str) -> int:
|
|
if name:
|
|
row = conn.execute(
|
|
"SELECT id FROM conversations WHERE name = ?", (name,)
|
|
).fetchone()
|
|
if row:
|
|
return row[0]
|
|
display_name = name or "unnamed"
|
|
cur = conn.execute(
|
|
"INSERT INTO conversations (name, model) VALUES (?, ?)",
|
|
(display_name, model),
|
|
)
|
|
conn.commit()
|
|
return cur.lastrowid
|
|
|
|
|
|
def load_messages(conn: sqlite3.Connection, conv_id: int) -> list[dict]:
    """Fetch one conversation's messages, oldest first, as chat-API dicts."""
    cursor = conn.execute(
        "SELECT role, content FROM messages WHERE conversation_id = ? ORDER BY id",
        (conv_id,),
    )
    return [{"role": role, "content": content} for role, content in cursor]


def save_message(conn: sqlite3.Connection, conv_id: int, role: str, content: str):
    """Append one message row to the given conversation and commit."""
    # The connection context manager commits the transaction on success.
    with conn:
        conn.execute(
            "INSERT INTO messages (conversation_id, role, content) VALUES (?, ?, ?)",
            (conv_id, role, content),
        )


def list_conversations(conn: sqlite3.Connection):
    """Print a table of all saved conversations, newest first."""
    rows = conn.execute(
        "SELECT id, name, model, created_at FROM conversations ORDER BY created_at DESC"
    ).fetchall()
    if not rows:
        print("No conversations yet.")
        return
    print(f"\n{'ID':>4} {'Name':<30} {'Model':<15} {'Created'}")
    print("-" * 75)
    for conv_id, name, model, created in rows:
        print(f"{conv_id:>4} {name:<30} {model:<15} {created}")
    print()


def chat_loop(client: OpenAI, conn: sqlite3.Connection, conv_id: int, model: str, system_prompt: str | None):
    """Run the interactive REPL for one conversation.

    Loads any prior messages for *conv_id*; a brand-new conversation is
    seeded with *system_prompt* (persisted so a resume replays it).
    Loops on input() until /quit or EOF/Ctrl-C, streaming each reply
    and persisting both sides of the exchange.
    """
    messages = load_messages(conn, conv_id)

    if not messages and system_prompt:
        messages.append({"role": "system", "content": system_prompt})
        save_message(conn, conv_id, "system", system_prompt)

    if messages:
        print(f"\n--- Resuming conversation ({len(messages)} messages loaded) ---")
    else:
        print("\n--- New conversation started ---")

    print(f"Model: {model}")
    print("Type /quit to exit, /history to show conversation\n")

    while True:
        try:
            user_input = input("you> ").strip()
        except (EOFError, KeyboardInterrupt):
            print("\nBye.")
            break

        if not user_input:
            continue

        if user_input == "/quit":
            print("Bye.")
            break

        if user_input == "/history":
            for msg in messages:
                if msg["role"] == "system":
                    continue
                prefix = "you" if msg["role"] == "user" else "gpt"
                print(f"\n{prefix}> {msg['content']}")
            print()
            continue

        messages.append({"role": "user", "content": user_input})
        save_message(conn, conv_id, "user", user_input)

        # Keep the try body limited to the API call/stream so the handler
        # below can only ever pop the user message it is meant to undo.
        try:
            print("gpt> ", end="", flush=True)
            stream = client.chat.completions.create(
                model=model,
                messages=messages,
                stream=True,
            )
            full_response = []
            for chunk in stream:
                delta = chunk.choices[0].delta
                if delta.content:
                    print(delta.content, end="", flush=True)
                    full_response.append(delta.content)
            print()
        except Exception as e:
            print(f"\nError: {e}")
            # The failed user turn was already persisted above; remove it
            # from BOTH the in-memory context and the DB, otherwise a
            # resumed session would replay a message that never got a reply.
            messages.pop()
            conn.execute(
                "DELETE FROM messages WHERE id = "
                "(SELECT MAX(id) FROM messages"
                " WHERE conversation_id = ? AND role = 'user')",
                (conv_id,),
            )
            conn.commit()
        else:
            assistant_content = "".join(full_response)
            messages.append({"role": "assistant", "content": assistant_content})
            save_message(conn, conv_id, "assistant", assistant_content)


def main():
    """CLI entry point: parse args, open the DB, and dispatch.

    Modes: --list prints saved conversations; --resume ID reopens an
    existing conversation; otherwise a conversation is looked up or
    created by --name.
    """
    import argparse

    parser = argparse.ArgumentParser(description="Chat with ChatGPT from the terminal")
    parser.add_argument("-n", "--name", help="Conversation name (resumes if exists)")
    # default=None (not DEFAULT_MODEL) so an explicit "-m gpt-4.1" is
    # distinguishable from "flag not given" when resuming below.
    parser.add_argument("-m", "--model", default=None, help="Model to use (default: gpt-4.1)")
    parser.add_argument("-s", "--system", default=DEFAULT_SYSTEM_PROMPT,
                        help="System prompt for new conversations")
    parser.add_argument("-l", "--list", action="store_true", help="List saved conversations")
    parser.add_argument("--resume", type=int, help="Resume conversation by ID")
    args = parser.parse_args()

    load_dotenv()  # pull OPENAI_API_KEY from a .env file if present

    conn = sqlite3.connect(DB_PATH)
    init_db(conn)

    if args.list:
        list_conversations(conn)
        conn.close()
        return

    api_key = os.environ.get("OPENAI_API_KEY")
    if not api_key:
        print("Error: OPENAI_API_KEY environment variable not set.", file=sys.stderr)
        conn.close()
        sys.exit(1)

    client = OpenAI(api_key=api_key)

    if args.resume:
        row = conn.execute(
            "SELECT id, model FROM conversations WHERE id = ?", (args.resume,)
        ).fetchone()
        if not row:
            print(f"Error: No conversation with ID {args.resume}", file=sys.stderr)
            conn.close()
            sys.exit(1)
        conv_id = row[0]
        # An explicit -m overrides the stored model; otherwise keep the
        # model the conversation was created with.
        model = args.model if args.model is not None else row[1]
    else:
        model = args.model if args.model is not None else DEFAULT_MODEL
        conv_id = get_or_create_conversation(conn, args.name, model)

    try:
        chat_loop(client, conn, conv_id, model, args.system)
    finally:
        conn.close()


if __name__ == "__main__":
    main()