Skip to content

Commit

Permalink
Revert "fix basic function"
Browse files Browse the repository at this point in the history
  • Loading branch information
alvaro-vinuela authored May 17, 2024
1 parent bce6bcb commit 5e1ed09
Showing 1 changed file with 28 additions and 14 deletions.
42 changes: 28 additions & 14 deletions engineered_chatgpt_prompts.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
import asyncio
import os
import sys

import traceback
import openai
from dotenv import load_dotenv, find_dotenv
from PyQt5.QtWidgets import (QApplication, # pylint: disable=no-name-in-module
Expand All @@ -21,9 +21,10 @@

_ = load_dotenv(find_dotenv()) # read local .env file

openai.api_key = os.getenv('OPENAI_API_KEY')
openai.organization = os.getenv('OPENAI_ORGANIZATION')
response = ""
client = openai.AsyncOpenAI(
api_key=os.getenv('OPENAI_API_KEY'),
organization=os.getenv('OPENAI_ORGANIZATION'),
)

print("OpenAI version:", openai.__version__)

Expand All @@ -35,26 +36,41 @@ async def get_completion(prompt,
method to query openai API
"""
messages = [{"role": "user", "content": prompt}]
chat = openai.ChatCompletion.create(
chat = None
try:
# chat = openai.ChatCompletion.create(
chat = await client.chat.completions.create(
model=model,
messages=messages,
temperature=0,
# stream=True,
stream=True,
# this is the randomness degree of the model's output
)


global response
response = chat.choices[0].message["content"]
sys.stdout.write(f"\r{response}>")
sys.stdout.flush()
except openai.APIConnectionError as e:
print("The server could not be reached")
print(e.__cause__) # an underlying Exception, likely raised within http
except openai.RateLimitError as e:
print("A 429 status code was received; we should back off a bit.")
except openai.APIStatusError as e:
print("Another non-200-range status code was received")
print(e.status_code)
print(e.response)
return None

response = ""
async for part in chat:
response += part.choices[0].delta.content or ""
sys.stdout.write(f"\r{response}>")
sys.stdout.flush()
# print(response)
return response


class EngineeredChatgptPrompts(
QWidget): # pylint: disable=too-many-instance-attributes
"""
class to hold widgets and process method of main application
    class to hold widgets and process method of main application
"""

def __init__(self):
Expand Down Expand Up @@ -131,8 +147,6 @@ def process_text(self):
f"process the following text with specified goal"
f"(delimited by triple backticks): ```{input_text}```")
asyncio.run(get_completion(complete_prompt))
global response
self.output_text.setPlainText(response)

def load_goal(self):
""" open a dialog inspecting text files on file system """
Expand Down

0 comments on commit 5e1ed09

Please sign in to comment.