feat: rip out toolcache so we can publish to pypi (#116)
commit 40ab4cb · 1 parent 3da2a23
Showing 3 changed files with 47 additions and 19 deletions.
@@ -1,35 +1,68 @@
+import contextlib
 import functools
+import hashlib
+import inspect
 import os
-from inspect import iscoroutinefunction
+import pickle
+from typing import Any
 
-import toolcache
+import a_sync
+import aiofiles
+import aiofiles.os
 from a_sync._typing import AnyFn, P, T
 from brownie import chain
 
-cache_base_path = f"./cache/{chain.id}/"
+BASE_PATH = f"./cache/{chain.id}/"
 
 
 def cache_to_disk(fn: AnyFn[P, T]) -> AnyFn[P, T]:
     module = fn.__module__.replace(".", "/")
-    cache_path_for_fn = cache_base_path + module + "/" + fn.__name__
+    cache_path_for_fn = BASE_PATH + module + "/" + fn.__name__
 
-    # Ensure the cache dir exists
-    os.makedirs(cache_path_for_fn, exist_ok=True)
+    def get_cache_file_path(args, kwargs):
+        # Create a unique filename based on the function arguments
+        key = hashlib.md5(pickle.dumps((args, sorted(kwargs.items())))).hexdigest()
+        return os.path.join(cache_path_for_fn, f"{key}.json")
 
-    cache_decorator = toolcache.cache("disk", cache_dir=cache_path_for_fn)
+    os.makedirs(cache_path_for_fn, exist_ok=True)
 
-    if iscoroutinefunction(fn):
+    if inspect.iscoroutinefunction(fn):
 
-        @cache_decorator
         @functools.wraps(fn)
         async def disk_cache_wrap(*args: P.args, **kwargs: P.kwargs) -> T:
-            return await fn(*args, **kwargs)
+            cache_path = get_cache_file_path(args, kwargs)
+            if await aiofiles.os.path.exists(cache_path):
+                async with aiofiles.open(cache_path, "rb") as f:
+                    with contextlib.suppress(EOFError):
+                        return pickle.loads(await f.read())
+            async_result: T = await fn(*args, **kwargs)
+            _cache_write(cache_path, async_result)
+            return async_result
 
     else:
 
-        @cache_decorator
         @functools.wraps(fn)
         def disk_cache_wrap(*args: P.args, **kwargs: P.kwargs) -> T:
-            return fn(*args, **kwargs)  # type: ignore [return-value]
+            cache_path = get_cache_file_path(args, kwargs)
+            if os.path.exists(cache_path):
+                with open(cache_path, "rb") as f:
+                    with contextlib.suppress(EOFError):
+                        return pickle.load(f)
+
+            sync_result: T = fn(*args, **kwargs)  # type: ignore [assignment, return-value]
+            _cache_write(cache_path, sync_result)
+            return sync_result
 
     return disk_cache_wrap
+
+
+def _cache_write(cache_path: str, result: Any) -> None:
+    a_sync.create_task(
+        coro=__cache_write(cache_path, result),
+        skip_gc_until_done=True,
+    )
+
+
+async def __cache_write(cache_path: str, result: Any) -> None:
+    async with aiofiles.open(cache_path, "wb") as f:
+        await f.write(pickle.dumps(result))
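For reviewers unfamiliar with the pattern, a minimal usage sketch of the new decorator follows. The import path and example functions are hypothetical (the module path of the file in this diff is not shown here), and a connected Brownie network is assumed because the cache directory is derived from chain.id.

# Hypothetical usage sketch -- not part of this commit.
# Assumes cache_to_disk is importable from wherever the file in this diff lives,
# and that a Brownie network connection is active (BASE_PATH embeds chain.id).
from my_package.cache import cache_to_disk  # placeholder import path


@cache_to_disk
async def get_token_decimals(token: str, block: int) -> int:
    # Expensive async lookup; the result is pickled to
    # ./cache/<chain.id>/<module>/get_token_decimals/<md5-of-args>.json
    # and returned directly on later calls with the same arguments.
    ...


@cache_to_disk
def get_token_decimals_sync(token: str, block: int) -> int:
    # Sync callables go through the equivalent blocking wrapper.
    ...

Because both the cache key and the stored value go through pickle, the decorated function's arguments and return value must be picklable; cache writes are dispatched as fire-and-forget tasks via a_sync.create_task rather than awaited inline.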