from langchain.chat_models import ChatOpenAI
#from langchain.llms import OpenAI
from langchain.chains import ConversationChain
from langchain.memory import ConversationBufferWindowMemory
from langchain.prompts.prompt import PromptTemplate
from langchain.schema import messages_from_dict, messages_to_dict
from time import sleep
import sys
import os

logfile = None

def slowprint(s, logfile=None):
    """Print a string one character at a time (teletype effect), optionally mirroring it to a log file."""
    for c in s + '\n':
        sys.stdout.write(c)
        sys.stdout.flush()
        sleep(0.01)
    if logfile:
        logfile.write(s + '\n')

MODEL_NAME = "gpt-3.5-turbo-16k-0613"

# Read the prompt template; it must contain the "history" and "input" placeholders.
template = open("prompt-template.txt").read().strip()
PROMPT = PromptTemplate(input_variables=["history", "input"], template=template)

# Sliding-window memory keeping the last 64 exchanges, with custom speaker prefixes.
memory = ConversationBufferWindowMemory(ai_prefix="DoctorK", human_prefix="JJ", k=64)

# Pre-load the memory with the prior "speaker: text" chat lines; malformed lines are skipped.
for line in open("book-chats.txt").readlines():
    v = [s.strip() for s in line.split(":")]
    if len(v) != 2:
        continue
    if v[0].lower() == "doc":
        memory.chat_memory.add_ai_message(v[1])
    else:
        memory.chat_memory.add_user_message(v[1])

conversation = ConversationChain(
    prompt=PROMPT,
    llm=ChatOpenAI(model_name=MODEL_NAME),
    verbose=False,
    memory=memory
)

sys.stderr.write("{esc}[2J{esc}[H".format(esc=chr(27)))  # Clear screen ;)
sys.stderr.flush()

# Optional first argument: path of a log file recording the conversation.
if len(sys.argv) > 1:
    try:
        logfile = open(sys.argv[1], "w")
    except OSError:
        slowprint("Couldn't open log file '{}'!!!\n".format(sys.argv[1]))

slowprint("""=====================================================================
Interactive Talk-Program loaded and started.
Users:
 * [Doctor Kernel]
 * [JJ] <- you
Hit <Enter> to exit
=====================================================================""", logfile)

# Opening turn: ask the model to recap the pre-loaded conversation.
slowprint("Doctor Kernel: {}\n".format(
    conversation.predict(input="Hello again, doctor. Please remind me what we were talking about last time.")), logfile)

# Main chat loop: an empty line (or EOF) ends the session.
while True:
    try:
        prompt = input("JJ: > ").strip()
    except EOFError:
        prompt = ""
    if not prompt:
        slowprint("""
=====================================================================
End of Talk
=====================================================================""")
        sys.exit(0)
    if logfile:
        logfile.write("JJ: {}\n\n".format(prompt))
    print('')  # Skip a line
    slowprint("Doctor Kernel: {}\n".format(conversation.predict(input=prompt)), logfile)