Updates pyproject.toml. Updates tests. Splits grc price retrieving function into retriever and parser. Adds soups for testing.

jwaggy committed Feb 8, 2025
1 parent 55a1931 commit 92073bb
Showing 6 changed files with 110 additions and 79 deletions.
102 changes: 57 additions & 45 deletions grc_price_utils.py
@@ -21,8 +21,59 @@

GRC_PRICE_URLS = ("https://www.bybit.com/en/coin-price/gridcoin-research/", "https://coinstats.app/coins/gridcoin/", "https://marketcapof.com/crypto/gridcoin-research/")

def parse_grc_price_soup(url: str, price_soup: str) -> tuple[Union[float, None], str, str]:
float_price = None
info_message = ""
url_message = ""

def get_grc_price_from_site() -> tuple[Union[float, None], str, list, list, list]:
soup = BeautifulSoup(price_soup, "html.parser")

if url == "https://www.bybit.com/en/coin-price/gridcoin-research/":
pre_price = soup.find("div", attrs={"data-cy": "coinPrice"})

if pre_price is not None:
try:
price = pre_price.text.replace("$", "").strip()
float_price = float(price)
info_message = f"Found GRC price of {float_price} from {url}"
except Exception:
url_message = f"Error getting info from {url}"
else:
url_message = f"Error getting info from {url}"
elif url == "https://coinstats.app/coins/gridcoin/":
pre_price = soup.find("div", class_="CoinOverview_mainPrice__YygaC")

if pre_price is not None:
try:
price = pre_price.p.text.replace("$", "").strip()
float_price = float(price)
info_message = f"Found GRC price of {float_price} from {url}"
except Exception:
url_message = f"Error getting info from {url}"
else:
url_message = f"Error getting info from {url}"
elif url == "https://marketcapof.com/crypto/gridcoin-research/":
pre_pre_price = soup.find("div", class_="price")

if pre_pre_price is not None:
pre_price = pre_pre_price.find(string=True, recursive=False)

if pre_price is not None:
try:
price = pre_price.replace("$", "").strip()
float_price = float(price)
info_message = f"Found GRC price of {float_price} from {url}"
except Exception:
url_message = f"Error getting info from {url}"
else:
url_message = f"Error getting info from {url}"
else:
url_message = f"Error getting info from {url}"

return float_price, url_message, info_message


def get_grc_price_from_sites() -> tuple[Union[float, None], str, list, list, list]:
headers = requests.utils.default_headers()
headers["User-Agent"] = random.choice(AGENTS)
found_prices = []
@@ -37,52 +88,13 @@ def get_grc_price_from_site() -> tuple[Union[float, None], str, list, list, list]:
error_logger_messages.append(f"Error fetching stats from {url}: {error}")
continue

soup = BeautifulSoup(response.content, "html.parser")
price, url_message, info_message = parse_grc_price_soup(url, response.content)

if url == "https://www.bybit.com/en/coin-price/gridcoin-research/":
pre_price = soup.find("div", attrs={"data-cy": "coinPrice"})
if price is not None:
found_prices.append(price)

if pre_price is not None:
try:
price = pre_price.text.replace("$", "").strip()
float_price = float(price)
found_prices.append(float_price)
info_logger_messages.append(f"Found GRC price of {float_price} from {url}")
except Exception:
url_messages.append(f"Error getting info from {url}")
else:
url_messages.append(f"Error getting info from {url}")
elif url == "https://coinstats.app/coins/gridcoin/":
pre_price = soup.find("div", class_="CoinOverview_mainPrice__YygaC")

if pre_price is not None:
try:
price = pre_price.p.text.replace("$", "").strip()
float_price = float(price)
found_prices.append(float_price)
info_logger_messages.append(f"Found GRC price of {float_price} from {url}")
except Exception:
url_messages.append(f"Error getting info from {url}")
else:
url_messages.append(f"Error getting info from {url}")
elif url == "https://marketcapof.com/crypto/gridcoin-research/":
pre_pre_price = soup.find("div", class_="price")

if pre_pre_price is not None:
pre_price = pre_pre_price.find(string=True, recursive=False)

if pre_price is not None:
try:
price = pre_price.replace("$", "").strip()
float_price = float(price)
found_prices.append(float_price)
info_logger_messages.append(f"Found GRC price of {float_price} from {url}")
except Exception:
url_messages.append(f"Error getting info from {url}")
else:
url_messages.append(f"Error getting info from {url}")
else:
url_messages.append(f"Error getting info from {url}")
url_messages.append(url_message)
info_logger_messages.append(info_message)

if len(found_prices) > 0:
table_message = f"Found GRC price {sum(found_prices) / len(found_prices)}"
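
With the parsing split out of the retrieval loop, the per-site selector logic can now be checked in isolation, without any network access. A minimal sketch of the new function's contract on invented markup (the HTML snippets below are illustrations, not the actual fixtures added in tests/soups.py):

from grc_price_utils import parse_grc_price_soup

URL = "https://www.bybit.com/en/coin-price/gridcoin-research/"

# Hypothetical snippet mimicking the element the Bybit branch looks for.
hit = '<div data-cy="coinPrice">$0.0123</div>'
price, url_message, info_message = parse_grc_price_soup(URL, hit)
# price == 0.0123, url_message == "", info_message names the source URL

# When the expected element is missing, no exception escapes: the price stays
# None and only the error message for that URL is populated.
price, url_message, info_message = parse_grc_price_soup(URL, "<html></html>")
# price is None, url_message == f"Error getting info from {URL}", info_message == ""
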
4 changes: 2 additions & 2 deletions main.py
@@ -26,7 +26,7 @@
from requests.auth import HTTPBasicAuth
from typing import List, Union, Dict, Tuple, Set, Any
import sys, signal
from grc_price_utils import get_grc_price_from_site
from grc_price_utils import get_grc_price_from_sites

# This is needed for some async stuff
import nest_asyncio
@@ -968,7 +968,7 @@ def get_grc_price(sample_text: str = None) -> Union[float, None]:
Raises:
Exception: An error occurred accessing an online GRC price source.
"""
price, table_message, url_messages, info_log_messages, error_log_messages = get_grc_price_from_site()
price, table_message, url_messages, info_log_messages, error_log_messages = get_grc_price_from_sites()

for log_message in info_log_messages:
log.info(log_message)
1 change: 1 addition & 0 deletions pyproject.toml
@@ -25,6 +25,7 @@ urllib3 = "^2.2.3"
xmltodict = "^0.14.2"
zope-interface = "^7.2"
nest-asyncio = "^1.6.0"
beautifulsoup4 = "^4.13.3"


[build-system]
28 changes: 18 additions & 10 deletions tests/main_tests.py

Large diffs are not rendered by default.

37 changes: 15 additions & 22 deletions tests/network/network_tests.py
@@ -1,8 +1,10 @@
import pytest
import main
import grc_price_utils
from typing import Dict,List,Tuple,Union,Any
# Tests that require a network connection and will fail without one
APPROVED_PROJECT_URLS={}

@pytest.fixture()
def test_get_approved_project_urls_web():
"""
@@ -11,31 +13,22 @@ def test_get_approved_project_urls_web():
"""
global APPROVED_PROJECT_URLS
APPROVED_PROJECT_URLS=main.get_approved_project_urls_web()


def test_get_project_mag_ratios_from_url(test_get_approved_project_urls_web):
result=main.get_project_mag_ratios_from_url(30,APPROVED_PROJECT_URLS)
assert len(result)>3
def test_get_grc_price_regex():
# Function to test the regexes for getting grc price. Note this may fail if you get a "are you a bot?" page.
# Inspect HTML before assuming the regex is broken
import requests as req
import re
headers = req.utils.default_headers()
headers.update({
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.0.0 Safari/537.36',
})

sample_text=None
for url, info in main.PRICE_URL_DICT.items():
regex = info[1]
name = info[0]
resp = ''
if sample_text:
resp = sample_text
else:
resp = req.get(url, headers=headers).text
regex_result = re.search(regex, resp)
assert regex_result
float(regex_result.group(2))


def test_get_grc_price():
# Function to test the soup finds for getting the grc price. Note this may fail if you get a "are you a bot?" page.
# Inspect the html before assuming that the finds are broken.
price, _, _, _, _ = grc_price_utils.get_grc_price_from_sites()

assert price
assert isinstance(price,float)


def test_grc_grc_price():
answer=main.get_grc_price()
assert isinstance(answer,float)
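
Since the parser no longer needs a live response, an offline companion to the network test above becomes possible using saved markup. A hypothetical version (the fixture strings here are invented; the real saved soups live in tests/soups.py, whose contents are not shown in this diff):

import pytest
import grc_price_utils

# Invented stand-ins for saved page fragments; real fixtures would come from tests/soups.py.
FAKE_SOUPS = {
    "https://www.bybit.com/en/coin-price/gridcoin-research/":
        '<div data-cy="coinPrice">$0.0123</div>',
    "https://coinstats.app/coins/gridcoin/":
        '<div class="CoinOverview_mainPrice__YygaC"><p>$0.0123</p></div>',
    "https://marketcapof.com/crypto/gridcoin-research/":
        '<div class="price">$0.0123<span>USD</span></div>',
}

@pytest.mark.parametrize("url,soup", FAKE_SOUPS.items())
def test_parse_grc_price_soup_offline(url, soup):
    # Each branch of the parser should pull the same price out of its own markup.
    price, url_message, info_message = grc_price_utils.parse_grc_price_soup(url, soup)
    assert price == pytest.approx(0.0123)
    assert url_message == ""
    assert "Found GRC price" in info_message
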
17 changes: 17 additions & 0 deletions tests/soups.py

Large diffs are not rendered by default.
