[demo] partially updating examples for streaming mode and openAI 1.1.1 API
forrestbao authored and Yazawazi committed Nov 28, 2023
1 parent 7b86251 commit 06a26c2
Showing 5 changed files with 68 additions and 67 deletions.
31 changes: 31 additions & 0 deletions examples/AI/chatGPT.py
@@ -0,0 +1,31 @@
+import os # Python's native
+
+
+# import funix
+# @funix.funix(
+#     rate_limit=funix.decorator.Limiter.session(max_calls=2, period=60*60*24),
+#     show_source=True
+# )
+
+# def ChatGPT_simplest(prompt: str) -> str:
+#     completion = openai.ChatCompletion.create(
+#         messages=[{"role": "user", "content": prompt}], model="gpt-3.5-turbo"
+#     )
+#     return completion["choices"][0]["message"]["content"]
+
+OPENAI_KEY = os.environ.get("OPENAI_API_KEY")
+import openai
+import IPython # a famous library for interactive python
+
+def ChatGPT(prompt: str) -> IPython.display.Markdown:
+    client = openai.OpenAI() # defaults to os.environ.get("OPENAI_API_KEY")
+    response = client.chat.completions.create(
+        messages=[{"role": "user", "content": prompt}],
+        model="gpt-3.5-turbo",
+        # stream=True,
+    )
+    return response.choices[0].message.content
+    # message = []
+    # for chunk in response:
+    #     message.append(chunk.choices[0].delta.content or "")
+    #     yield "".join(message)
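Note: the commented-out tail above sketches the streaming variant of the same call. A minimal version, following the same OpenAI 1.x pattern used in chatGPT_advanced.py later in this commit and reusing the openai import above (the name ChatGPT_streaming is illustrative, not part of the commit; OPENAI_API_KEY is assumed to be set):

def ChatGPT_streaming(prompt: str):
    client = openai.OpenAI()  # reads OPENAI_API_KEY from the environment
    response = client.chat.completions.create(
        messages=[{"role": "user", "content": prompt}],
        model="gpt-3.5-turbo",
        stream=True,  # the answer arrives as incremental chunks
    )
    message = []
    for chunk in response:
        # each chunk carries a small text delta; the field is None on the final chunk
        message.append(chunk.choices[0].delta.content or "")
        yield "".join(message)  # yield the answer accumulated so far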
65 changes: 31 additions & 34 deletions examples/AI/chatGPT_advanced.py
@@ -1,54 +1,51 @@
 import os
 import typing
 
-import ipywidgets
+import IPython
 
 import openai
 
-# openai.api_key = os.environ.get("OPENAI_KEY")
+# Disable this app from being called in public hosted examples
 
 import funix
 
 
-@funix.new_funix_type(
-    {
-        "name": "textarea",
-        "config": {
-            "rows": 6,
-        },
-    }
-)
-class PromptBox(str):
-    pass
+# @funix.new_funix_type(
+#     {
+#         "name": "textarea",
+#         "config": {
+#             "rows": 6,
+#         },
+#     }
+# )
+# class PromptBox(str):
+#     pass
 
 
 cfg = { # declarative configuration, all in one place
-    "description": """Try the **ChatGPT** app in [Funix](http://funix.io), the minimalist way to build apps in Python. """,
     "argument_labels": {
         "prompt": "_What do you wanna ask?_",
         "max_tokens": "**Length** of the answer",
     },
+    "description": """The ChatGPT app build in [Funix.io]""",
     "conditional_visible": [
-        {"when": {"show_advanced_options": True}, "show": ["max_tokens", "model"]}
+        {"when": {"show_advanced_options": True}, "show": ["max_tokens", "stream"]}
     ],
-    "rate_limit": funix.decorator.Limiter.session(max_calls=2, period=60 * 60 * 24),
+    # "rate_limit": funix.decorator.Limiter.session(max_calls=2, period=60 * 60 * 24),
 }
 
 
 @funix.funix(**cfg)
 def ChatGPT_advanced(
-    prompt: PromptBox,
+    prompt: str,
     show_advanced_options: bool = True,
-    model: typing.Literal["gpt-3.5-turbo", "gpt-3.5-turbo-0613"] = "gpt-3.5-turbo",
-    max_tokens: range(100, 500, 50) = 150,
-    # openai_key: ipywidgets.Password = "use environment variable",
-) -> str:
-    # if openai_key != "use environment variable":
-    #     openai.api_key = openai_key
-    completion = openai.ChatCompletion.create(
+    stream: typing.Literal[True, False] = True,
+    max_tokens: range(100, 500, 50) = 150
+) -> IPython.display.Markdown:
+
+    client = openai.OpenAI() # defaults to os.environ.get("OPENAI_API_KEY")
+    response = client.chat.completions.create(
         messages=[{"role": "user", "content": prompt}],
-        model=model,
-        max_tokens=max_tokens,
+        model="gpt-3.5-turbo",
+        stream=stream,
+        max_tokens=max_tokens
     )
-    return completion["choices"][0]["message"]["content"]
+    if stream:
+        message = []
+        for chunk in response:
+            message.append(chunk.choices[0].delta.content or "")
+            yield "".join(message)
+    else: # no stream
+        return response.choices[0].message.content
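Since stream=True turns ChatGPT_advanced into a generator that yields the accumulated answer chunk by chunk, the Funix front end can render the reply as it grows. For a quick check outside the Funix UI, the function can be iterated directly (a sketch; it assumes openai and funix are installed, OPENAI_API_KEY is set, and that the funix decorator leaves the function callable as plain Python):

if __name__ == "__main__":
    # prints progressively longer versions of the answer, mirroring what the web UI shows
    for partial in ChatGPT_advanced("What is Funix?", stream=True):
        print(partial)

One caveat: because the body contains yield, Python treats the whole function as a generator, so the stream=False branch delivers its text through the generator's return value (StopIteration) rather than as a plain string.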
21 changes: 0 additions & 21 deletions examples/AI/chatGPT_lazy.py

This file was deleted.

4 changes: 0 additions & 4 deletions examples/hello_world.py
@@ -1,7 +1,3 @@
-import funix
-@funix.funix()
-
-# The two lines above can be commented out if using the lazy model, i.e., $ funix -l hello_world.py
 
 def hello_world(name: str="Funix") -> str:
     return f"Hello {name}!"
14 changes: 6 additions & 8 deletions examples/stream.py
@@ -1,10 +1,8 @@
-import time
 from funix import funix
+import time
 
+def stream() -> str:
+    message = "Freedom has many difficulties and democracy is not perfect, but we have never had to put a wall up to keep our people in, to prevent them from leaving us. I want to say, on behalf of my countrymen, who live many miles away on the other side of the Atlantic, who are far distant from you, that they take the greatest pride that they have been able to share with you, even from a distance, the story of the last 18 years. I know of no town, no city, that has been besieged for 18 years that still lives with the vitality and the force and the hope and the determination of the city of West Berlin. While the wall is the most obvious and vivid demonstration of the failures of the communist system, for all the world to see, we take no satisfaction in it, for it is, as your mayor has said, an offense not only against history but an offense against humanity, separating families, dividing husbands and wives and brothers and sisters, and dividing a people who wish to be joined together. -- President John F. Kennedy at the Rudolph Wilde Platz, Berlin, June 26, 1963."
 
-@funix()
-def oh_iam_yield() -> str:
-    yield "This is a function that needs 10 secs to run."
-    for i in range(10):
-        time.sleep(1)
-        yield f"Update {i + 1}/10, Time: {time.time()}\n"
+    for i in range(len(message)):
+        time.sleep(0.01)
+        yield message[0:i]
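The rewritten stream() yields progressively longer prefixes of the quotation, one character every 10 ms, which Funix streams to the browser as they arrive. A quick way to watch the same effect in a terminal (a sketch; it assumes the module is run directly, with the unused funix import left as in the commit):

if __name__ == "__main__":
    for partial in stream():
        # overwrite the same terminal line with the last 80 characters revealed so far
        print(f"\r{partial[-80:]}", end="", flush=True)
    print()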
