app.py
import logging
from flask import Flask, request, jsonify, Response, send_from_directory
from flask_cors import CORS
from openai import OpenAI
import datetime
import os
from dotenv import load_dotenv
load_dotenv()
# Configure logging
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
app = Flask(__name__, static_folder='build', static_url_path='/')
# The following startup check breaks when deployed to Azure, so it is disabled.
# Load OpenAI API key from environment variable
# openai_api_key = os.getenv('OPENAI_API_KEY')
# if not openai_api_key:
#     raise ValueError('Your OpenAI API key was not found in your environment variables. Please check.')
# Initialize OpenAI client (the key is picked up from the OPENAI_API_KEY environment variable)
openai_client = OpenAI(organization="org-R588VtVPiLayZlPfc2F0DyAI")
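# If the key check above is re-enabled, the key can also be passed to the client
# explicitly instead of relying on the OPENAI_API_KEY environment variable
# (a sketch only; the organization id is the one already used above):
# openai_client = OpenAI(
#     api_key=os.getenv('OPENAI_API_KEY'),
#     organization="org-R588VtVPiLayZlPfc2F0DyAI",
# )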
# Check if running in a development environment
is_dev = os.getenv('FLASK_ENV') == 'development'
# CORS configuration for development: allow all origins
if is_dev:
    logging.info("WARNING: CORS enabled. Hope you're not in production ;)")
    CORS(app, origins="*")
# CORS configuration for production: allow only the origin specified in the environment variable
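# A minimal sketch of that production branch (assumption: the allowed origin is
# provided in an ALLOWED_ORIGIN environment variable; adjust the name to your setup):
else:
    allowed_origin = os.getenv('ALLOWED_ORIGIN')
    if allowed_origin:
        CORS(app, origins=[allowed_origin])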
@app.route('/')
def index():
    logging.info('Received request for index route')
    return send_from_directory(app.static_folder, 'index.html')
@app.route('/refine/', methods=['POST', 'GET'])
def refine():
    logging.info('Received request for /refine route')
    data = request.get_json()
    logging.debug(f'Received data: {data}')
    details = data.get('details', '')
    project = data.get('project', '')
    subtasks = data.get('subtasks', [])
    status = data.get('status', [])
    instructions = f"""Based on the task breakdown you previously produced for the project "{project}" (details: {details}),
I want you to revise specific subtasks that I provide. Fix only the given subtask and change nothing else.
"""
    refined_subtasks = []
    for subtask, task_status in zip(subtasks, status):
        if task_status == 'keep':
            refined_subtasks.append(subtask)
            continue
        elif task_status == 'remove':
            continue  # Skip this subtask
        else:
            prompt = f"This subtask needs work: \"{subtask}\". This is what I feel about it: {task_status}. Fix it."
            logging.info('Sending completion request to OpenAI API')
            completion = openai_client.chat.completions.create(
                model="gpt-3.5-turbo",
                messages=[
                    {"role": "system", "content": instructions},
                    {"role": "user", "content": prompt}
                ]
            )
            logging.debug(f'Received response from OpenAI API: {completion.choices[0].message.content}')
            refined_subtasks.append(completion.choices[0].message.content)
    response_data = {
        'details': details,
        'project': project,
        'subtasks': refined_subtasks
    }
    logging.info('Returning response with refined subtasks')
    return jsonify(response_data), 200
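# Example request (illustrative payload only; the field names match what this route reads):
# curl -X POST http://localhost:5001/refine/ -H 'Content-Type: application/json' \
#   -d '{"project": "Write a thesis", "details": "due in May", "subtasks": ["Outline chapters"], "status": ["make it more specific"]}'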
@app.route('/response/', methods=['POST'])
def action():
    logging.info('Received POST request for /response route')
    data = request.get_json()
    logging.debug(f'Received data: {data}')
    project = data['project']
    details = data['details']
    # Prepare prompt for OpenAI completion
    instructions = """You are a scheduler assistant for breaking complex tasks into actionable chunks. Your goal is to provide a list of small actions that build up to the project chosen by the user."""
    prompt = f"You will break down this task: {project} into 10 small subtasks, with the following considerations: {details}"
    # Request completion from OpenAI API
    logging.info('Sending completion request to OpenAI API')
    completion = openai_client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[
            {"role": "system", "content": instructions},
            {"role": "user", "content": prompt}
        ]
    )
    logging.debug(f'Received response from OpenAI API: {completion.choices[0].message.content}')
    response = completion.choices[0].message.content
    # Parse the numbered response ("1. " through "10. ") into a list of subtasks
    lines = response.strip().split('\n')
    subtasks = [line.split('. ', 1)[1] for line in lines if line.startswith(tuple(f"{i}. " for i in range(1, 11)))]
    context = {
        'project': project,
        'details': details,
        'subtasks': subtasks
    }
    logging.info('Returning response with subtasks')
    return jsonify(context), 200
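# Example request (illustrative payload only; the field names match what this route reads):
# curl -X POST http://localhost:5001/response/ -H 'Content-Type: application/json' \
#   -d '{"project": "Write a thesis", "details": "due in May"}'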
@app.route('/save_to_ical/', methods=['POST'])
def save_to_ical():
    try:
        logging.info('Received POST request for /save_to_ical route')
        data = request.get_json()
        logging.debug(f'Received data: {data}')
        project = data['project']
        details = data['details']
        subtasks = data['subtasks']
        ical_content = "BEGIN:VCALENDAR\nVERSION:2.0\nPRODID:-//My Calendar//EN\n"
        # Add project event
        ical_content += "BEGIN:VEVENT\n"
        ical_content += f"SUMMARY:{project}\n"
        ical_content += f"DESCRIPTION:{details}\n"
        ical_content += f"DTSTART:{datetime.datetime.now().strftime('%Y%m%dT%H%M%S')}\n"
        ical_content += f"DTEND:{datetime.datetime.now().strftime('%Y%m%dT%H%M%S')}\n"
        ical_content += "END:VEVENT\n"
        # Add subtasks as separate events
        for subtask in subtasks:
            ical_content += "BEGIN:VEVENT\n"
            ical_content += f"SUMMARY:{subtask}\n"
            ical_content += f"DTSTART:{datetime.datetime.now().strftime('%Y%m%dT%H%M%S')}\n"
            ical_content += f"DTEND:{datetime.datetime.now().strftime('%Y%m%dT%H%M%S')}\n"
            ical_content += "END:VEVENT\n"
        ical_content += "END:VCALENDAR"
        # Create a response with the iCal content
        response = Response(ical_content, mimetype='text/calendar')
        response.headers.set('Content-Disposition', 'attachment', filename='tasks.ics')
        logging.info('Returning iCal file response')
        return response
    except Exception as e:
        logging.error(f'Error occurred: {e}')
        return jsonify({'error': str(e)}), 500
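# A possible refinement (sketch only, not wired in above): every event currently gets
# DTSTART == DTEND == "now", so all subtasks become zero-length, overlapping events.
# One option is to spread them over consecutive days with a real duration:
# def event_window(offset_days, duration_hours=1):
#     start = datetime.datetime.now() + datetime.timedelta(days=offset_days)
#     end = start + datetime.timedelta(hours=duration_hours)
#     fmt = '%Y%m%dT%H%M%S'
#     return start.strftime(fmt), end.strftime(fmt)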
# This block only runs when the app is started with 'python3 app.py'.
# When using the flask CLI instead, the port has to be passed as a CLI option.
if __name__ == '__main__':
    app.run(debug=True, port=5001)
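# For example (assuming the flask CLI and this file name):
# FLASK_ENV=development flask --app app run --port 5001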