-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathgenerate.py
More file actions
52 lines (45 loc) · 1.86 KB
/
generate.py
File metadata and controls
52 lines (45 loc) · 1.86 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
import openai
import os
from dotenv import load_dotenv
import json
#CONSTANTS
FILEPATH = "Data/tadg_comments.json"  # Instagram comment export (JSON)
TEMPERATURE = 1  # sampling temperature for the completion request
MODEL = "text-davinci-003"  # OpenAI legacy completions model name
MAX_TOKENS = 2499  # max tokens the model may generate in its reply
NUM_COMMENTS = 45  # number of comments included in each prompt
#main control flow of program
def main():
    """Entry point: configure the API client, load the exported comment
    data, and print two AI-generated personality summaries — one based on
    the user's earliest comments, one based on their most recent."""
    config()
    comments = loadData()
    #print(comments)
    earliest_summary = getFirstPrompt(comments)
    latest_summary = getLastPrompt(comments)
    print(f"Based on your first comments, the ai says this about your personality:\n{earliest_summary}")
    print(f"\n\n\nBased on your last comments, the ai says this about your personality:\n{latest_summary}")
#config for program
def config():
    """Load environment variables from a .env file and install the
    OPENAI_API_KEY value on the openai client."""
    load_dotenv()  # must run before the os.getenv lookup below
    api_key = os.getenv("OPENAI_API_KEY")
    openai.api_key = api_key
#loads in JSON data and returns array of comment objects
def loadData():
    """Load the comments JSON export from FILEPATH.

    Returns:
        list: the comment objects stored under the
        "comments_media_comments" key of the JSON document.

    Raises:
        FileNotFoundError: if FILEPATH does not exist.
        KeyError: if the expected top-level key is missing.
    """
    # json.load parses directly from the file object (no intermediate
    # string); explicit UTF-8 avoids platform-dependent default encodings
    # when reading exported data.
    with open(FILEPATH, encoding="utf-8") as json_file:
        data = json.load(json_file)
    return data["comments_media_comments"]
#gets the first 45 comments you have posted
def getFirstPrompt(data):
    """Ask the model to describe the user based on their earliest comments.

    Args:
        data: list of comment objects from the Instagram export
            (each shaped like {"string_map_data": {"Comment": {"value": ...}}}).

    Returns:
        str: the model's completion text.
    """
    # Fixes vs. original: the prompt wrongly said "last few" for the
    # *first* comments and misspelled "below"; slicing instead of
    # range-indexing also avoids an IndexError when the export holds
    # fewer than NUM_COMMENTS comments.
    promptuser1 = "what can you say about a person based on their first few social media comments shown below:\n"
    for comment in data[:NUM_COMMENTS]:
        promptuser1 += f"`{comment['string_map_data']['Comment']['value']}`\n"
    response1 = openai.Completion.create(model=MODEL, prompt=promptuser1, temperature=TEMPERATURE, max_tokens=MAX_TOKENS)
    return response1['choices'][0]['text']
#gets the last 45 comments you have posted
def getLastPrompt(data):
    """Ask the model to describe the user based on their most recent comments.

    Args:
        data: list of comment objects from the Instagram export
            (each shaped like {"string_map_data": {"Comment": {"value": ...}}}).

    Returns:
        str: the model's completion text.
    """
    # Fixes vs. original: range(len-1, len-NUM_COMMENTS, -1) stopped one
    # short, sending only NUM_COMMENTS-1 comments (44 of 45), and wrapped
    # to negative indices on short histories. reversed(data[-NUM_COMMENTS:])
    # walks exactly the newest NUM_COMMENTS comments, newest first, and is
    # safe when fewer exist. Also corrects the "bellow" typo.
    promptuser2 = "what can you say about a person based on their last few social media comments shown below:\n"
    for comment in reversed(data[-NUM_COMMENTS:]):
        promptuser2 += f"`{comment['string_map_data']['Comment']['value']}`\n"
    response2 = openai.Completion.create(model=MODEL, prompt=promptuser2, temperature=TEMPERATURE, max_tokens=MAX_TOKENS)
    return response2['choices'][0]['text']
# Run only when executed as a script, not when imported as a module.
if __name__ == '__main__':
    main()