This repository has been archived by the owner on Jul 24, 2024. It is now read-only.

Commit

Update sscGPT.py
NoDataFound authored May 9, 2023
1 parent 5b2b9a0 commit eeaa65c
Showing 1 changed file with 12 additions and 7 deletions.
19 changes: 12 additions & 7 deletions tools/sscGPT/sscGPT.py
@@ -251,15 +251,16 @@ def parse_html_to_text(html_content):
 
 if search_type == "ASI Query from URL":
     url = st.sidebar.text_input("", placeholder="Enter URL and press enter")
+    #send_to_asi = st.sidebar.checkbox("Check this box to send search to ASI")
     generated_text_chunks = []
     if url:
 
         try:
-            chunk_size = 2500
+            chunk_size = 2800
             response = requests.get(url)
             response.raise_for_status()
             parsed_text = parse_html_to_text(response.text)
 
             num_chunks = len(parsed_text) // chunk_size + (len(parsed_text) % chunk_size > 0)
             st.warning(f"`{num_chunks}` x `{chunk_size}` token (word) packages will be submitted to OpenAI model: `text-davinci-003`")
             for i in range(0, len(parsed_text), chunk_size):
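
For context, here is a minimal standalone sketch (not part of the commit) of the chunking arithmetic the hunk above relies on: num_chunks is a ceiling division written as floor division plus a remainder flag, and the for loop walks the parsed text in chunk_size-sized steps. The sample text and the character-based slicing inside the loop are assumptions, since the loop body is collapsed in this diff.

# Standalone sketch of the chunking arithmetic; sample values are illustrative only.
parsed_text = "word " * 6000   # stand-in for the text returned by parse_html_to_text()
chunk_size = 2800              # value introduced by this commit (was 2500)

# Ceiling division: floor division plus 1 when there is a non-empty remainder.
num_chunks = len(parsed_text) // chunk_size + (len(parsed_text) % chunk_size > 0)

# Assumed loop body: slice the text into chunk_size-character pieces.
chunks = [parsed_text[i:i + chunk_size] for i in range(0, len(parsed_text), chunk_size)]

assert len(chunks) == num_chunks   # both count the same number of packages
print(num_chunks, len(chunks[0]), len(chunks[-1]))
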
@@ -280,18 +281,22 @@ def parse_html_to_text(html_content):
             col1, col2, col3 = st.columns(3)
             col1.metric("HTML Word Count", total_size,total_size )
             col2.metric("Token Packages", num_chunks,num_chunks )
-            col3.metric("Prompt Token Count", len(prompt), len(prompt))
+            col3.metric("Total Prompt Token Count", len(prompt), len(prompt))
             st.markdown("----")
             st.info("Generated Attack Surface Intelligence Query from URL")
 
 
         except requests.exceptions.RequestException as e:
             st.error(f"Error occurred while fetching the URL: {e}")
 
         generated_text = '\n'.join(generated_text_chunks)
-        query = completions.choices[0].text.strip()
-        assets = search_assets(query)
-        st.write(f"{generated_text_chunks[0]}")
+        if st.checkbox("Check this box to send search to ASI"):
+            query_str = generated_text_chunks[0].split(":")[1].strip() # Extract the query string
+            #st.write(f"Query: {query_str}")
+            assets = search_assets(query_str)
+            st.write(assets)
+        st.write(f"{generated_text_chunks[0]}")
 
         #st.write(total_size)
         #st.write(generated_text)
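
As a rough illustration (not part of the commit), the new checkbox branch added above can be read as the following standalone sketch. The Streamlit checkbox and the search_assets() helper defined elsewhere in sscGPT.py are stubbed with plain Python, and the sample chunk text is an assumption; in the app the first chunk comes from the OpenAI completion stored in generated_text_chunks.

# Standalone sketch of the new "send to ASI" branch; names and sample data are illustrative.
def search_assets(query):
    # Stub for the search_assets() helper defined elsewhere in sscGPT.py.
    return [f"asset matching {query!r}"]

# Assumed shape of the first generated chunk: "Query: <ASI query string>".
generated_text_chunks = ["Query: example.com"]

send_to_asi = True  # stands in for st.checkbox("Check this box to send search to ASI")
if send_to_asi:
    # Take the text after the first ':' (assumes the query itself contains no colon).
    query_str = generated_text_chunks[0].split(":")[1].strip()
    assets = search_assets(query_str)
    print(assets)                    # st.write(assets) in the app
print(generated_text_chunks[0])      # always echoed, checkbox or not
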

0 comments on commit eeaa65c