fix basic function
remove the per-session client and use a global variable to pass the result
also add a unit test goal
alvaro-vinuela committed May 17, 2024
1 parent 962c11c commit 649b541
Showing 1 changed file with 14 additions and 28 deletions.
42 changes: 14 additions & 28 deletions engineered_chatgpt_prompts.py
@@ -7,7 +7,7 @@
import asyncio
import os
import sys
import traceback

import openai
from dotenv import load_dotenv, find_dotenv
from PyQt5.QtWidgets import (QApplication, # pylint: disable=no-name-in-module
@@ -21,10 +21,9 @@

_ = load_dotenv(find_dotenv()) # read local .env file

client = openai.AsyncOpenAI(
api_key=os.getenv('OPENAI_API_KEY'),
organization=os.getenv('OPENAI_ORGANIZATION'),
)
openai.api_key = os.getenv('OPENAI_API_KEY')
openai.organization = os.getenv('OPENAI_ORGANIZATION')
response = ""

print("OpenAI version:", openai.__version__)

@@ -36,41 +36,26 @@ async def get_completion(prompt,
method to query openai API
"""
messages = [{"role": "user", "content": prompt}]
chat = None
try:
# chat = openai.ChatCompletion.create(
chat = await client.chat.completions.create(
chat = openai.ChatCompletion.create(
model=model,
messages=messages,
temperature=0,
stream=True,
# stream=True,
# this is the randomness degree of the model's output
)

except openai.APIConnectionError as e:
print("The server could not be reached")
print(e.__cause__) # an underlying Exception, likely raised within http
except openai.RateLimitError as e:
print("A 429 status code was received; we should back off a bit.")
except openai.APIStatusError as e:
print("Another non-200-range status code was received")
print(e.status_code)
print(e.response)
return None

response = ""
async for part in chat:
response += part.choices[0].delta.content or ""
sys.stdout.write(f"\r{response}>")
sys.stdout.flush()
# print(response)

global response
response = chat.choices[0].message["content"]
sys.stdout.write(f"\r{response}>")
sys.stdout.flush()
return response


class EngineeredChatgptPrompts(
QWidget): # pylint: disable=too-many-instance-attributes
"""
class to hold widgets and preocess method of main application
class to hold widgets and process method of main application
"""

def __init__(self):
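Putting the changed hunks together, get_completion after this commit reads roughly as follows. This is a reconstruction from the diff: the default value of the model parameter is not visible in the hunk and is assumed here, an openai package older than 1.0 is assumed, and the function stays declared async even though it no longer awaits anything, so the existing asyncio.run call sites keep working:

    async def get_completion(prompt, model="gpt-3.5-turbo"):  # default model assumed, not shown in the diff
        """
        method to query openai API
        """
        global response
        messages = [{"role": "user", "content": prompt}]
        # synchronous pre-1.0 call; streaming and the per-error handling were removed
        chat = openai.ChatCompletion.create(
            model=model,
            messages=messages,
            temperature=0,  # this is the randomness degree of the model's output
        )
        response = chat.choices[0].message["content"]
        sys.stdout.write(f"\r{response}>")
        sys.stdout.flush()
        return response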
@@ -147,6 +131,8 @@ def process_text(self):
f"process the following text with specified goal"
f"(delimited by triple backticks): ```{input_text}```")
asyncio.run(get_completion(complete_prompt))
global response
self.output_text.setPlainText(response)

def load_goal(self):
""" open a dialog inspecting text files on file system """
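With this change, process_text hands the prompt to get_completion via asyncio.run and then reads the module-level response into the output widget. A trimmed sketch of the resulting flow; only the tail of the prompt template and the setPlainText line appear in the diff, so the widget accessors for the input and goal text, and the first part of the prompt, are placeholders:

    def process_text(self):
        """ build the prompt, run the completion and display the result """
        input_text = self.input_text.toPlainText()  # assumed accessor, not shown in the diff
        goal_text = self.goal_text.toPlainText()    # assumed accessor, not shown in the diff
        complete_prompt = (
            # the start of the prompt sits above the hunk; a placeholder stands in for it
            f"{goal_text}\n"
            f"process the following text with specified goal"
            f"(delimited by triple backticks): ```{input_text}```")
        asyncio.run(get_completion(complete_prompt))
        # get_completion stored its output in the module-level variable; the global
        # statement is only required for assignment, but the commit adds it anyway
        global response
        self.output_text.setPlainText(response)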
