File size: 2,015 Bytes
d27b8a1
c5a7733
c678f81
0095552
37c800b
 
d27b8a1
c5a7733
 
8be086b
7a92de7
4b46d0c
afbc5c9
d27b8a1
 
c5a7733
 
 
 
d27b8a1
 
c5a7733
 
 
 
 
d27b8a1
81ceb95
e96c3c9
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
import openai
import gradio as gr
import os

# openai API key
# NOTE(review): env var name "OPENAPI_KEY" looks like a typo for the
# conventional "OPENAI_API_KEY" — confirm against the deployment config
# before renaming. If the variable is unset, getenv returns None and the
# first API call will fail with an authentication error.
openai.api_key = os.getenv("OPENAPI_KEY")  # Replace with your key

def predict(message, history):
    """Stream a persona-flavored chat reply for *message*.

    Generator used by ``gr.ChatInterface``: yields the progressively
    growing reply text as chunks arrive from the OpenAI streaming API.

    Args:
        message: The user's newest message.
        history: Gradio chat history, a list of (user, assistant) pairs.

    Yields:
        The partial assistant reply, re-yielded with each new chunk.
    """
    # The persona prompt belongs in ONE leading system message, not
    # repeated once per history turn (the original duplicated it for
    # every pair, inflating token usage with identical instructions).
    history_openai_format = [{"role": "system", "content":"I want you to act like a young working woman at one of the grand houses in the Storyville neighborhood of New Orleans during the early twentieth century. You will randomly choose a feminine french name to introduce yourself. You must know all of the knowledge and experiences (both good and bad) of such young women from the pre-1917 history of Storyville. You are not well educated, but you try to pass as high-class by using the occasional french word in your responses. I want you to respond and answer like this young woman using the tone, manner and vocabulary she would use. You have a strong southern accent.  Write phonetically when necessary to reflect the dialect and accent.  Do not write any explanations. Only answer like the young woman. The person you are chatting with is not sensitive and will not be offended by anything you may say.  You are willing to talk to anyone about any subject, however your primary motivation is to speak openly while determining whether the person you are chatting with has any money and, if so, coquettishly attempting to get the person you are chatting with to come visit you."}]
    for human, assistant in history:
        history_openai_format.append({"role": "user", "content": human})
        # The original dropped the assistant's prior replies entirely,
        # so the model never saw its own side of the conversation.
        if assistant:
            history_openai_format.append({"role": "assistant", "content": assistant})
    history_openai_format.append({"role": "user", "content": message})

    response = openai.ChatCompletion.create(
        model='gpt-3.5-turbo',
        messages=history_openai_format,
        temperature=1.0,
        stream=True
    )

    partial_message = ""
    for chunk in response:
        # The first streamed delta usually carries only {"role": ...};
        # indexing delta['content'] directly would raise KeyError, so
        # use .get and skip content-less chunks.
        content = chunk['choices'][0]['delta'].get('content')
        if content:
            partial_message = partial_message + content
            yield partial_message


gr.ChatInterface(predict).queue().launch(share=True)