Commit b202347

Merge pull request #280 from rmodrak/update_syngine_client
Updates syngine client
rmodrak authored Nov 21, 2024
2 parents fbc832f + 8153247 commit b202347
Showing 2 changed files with 35 additions and 17 deletions.
mtuq/io/clients/syngine.py (15 changes: 11 additions & 4 deletions)
@@ -42,7 +42,8 @@ class Client(ClientBase):
     """
 
     def __init__(self, path_or_url=None, model=None,
-        include_mt=True, include_force=False):
+        include_mt=True, include_force=False,
+        cache_path=None):
 
         if not path_or_url:
             path_or_url = 'http://service.iris.edu/irisws/syngine/1'
@@ -55,6 +56,8 @@ def __init__(self, path_or_url=None, model=None,
         self.include_mt = include_mt
         self.include_force = include_force
 
+        self.cache_path = cache_path
+
 
     def get_greens_tensors(self, stations=[], origins=[], verbose=False):
         """ Downloads Green's tensors
@@ -82,7 +85,8 @@ def _get_greens_tensor(self, station=None, origin=None):
 
         if self.include_mt:
             dirname = download_unzip_mt_response(
-                self.url, self.model, station, origin)
+                self.url, self.model, station, origin,
+                cache_path=self.cache_path)
 
             for filename in GREENS_TENSOR_FILENAMES:
                 stream += obspy.read(dirname+'/'+filename, format='sac')
@@ -148,7 +152,9 @@ def _get_greens_tensor(self, station=None, origin=None):
 
 
 
-def download_greens_tensors(stations=[], origins=[], model='', verbose=False, **kwargs):
+def download_greens_tensors(stations=[], origins=[], model='',
+    cache_path=None, verbose=False, **kwargs):
+
     """ Downloads Green's tensors from syngine
 
     Downloads Green's functions for all combinations of stations and origins
@@ -173,4 +179,5 @@ def download_greens_tensors(stations=[], origins=[], model='', verbose=False, **kwargs):
     """
     client = Client(model=model, **kwargs)
-    return client.get_greens_tensors(stations, origins, verbose=verbose)
+    return client.get_greens_tensors(
+        stations, origins, cache_path=cache_path, verbose=verbose)
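
Taken together, the changes above add a cache_path keyword to both the Client constructor and download_greens_tensors and forward it to the syngine download helpers, so repeated runs can reuse previously downloaded Green's functions instead of fetching them again. Below is a minimal usage sketch that is not part of the diff: the model name and cache directory are illustrative, and stations/origins are assumed to have been built elsewhere (e.g. from data.get_stations() and an mtuq Origin), which is why the download call is left commented out.

# Minimal sketch of the new keyword (assumptions: mtuq is installed,
# '/tmp/syngine_cache' exists and is writable, and `stations`/`origins`
# are built from real data elsewhere).
from mtuq.io.clients.syngine import Client

client = Client(model='ak135f_2s',                # any syngine model name
                cache_path='/tmp/syngine_cache')  # downloads reused across runs

# greens = client.get_greens_tensors(stations, origins)

Later runs with the same cache_path hit the "if unzipped directory already exists" branch in mtuq/util/syngine.py below and skip the download.
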
mtuq/util/syngine.py (37 changes: 24 additions & 13 deletions)
@@ -65,7 +65,7 @@ def resolve_model(name):
         raise ValueError('Bad model')
 
 
-def download_unzip_mt_response(url, model, station, origin, verbose=True):
+def download_unzip_mt_response(url, model, station, origin, cache_path=None, verbose=True):
     """ Downloads Green's functions through syngine URL interface
     """
     url = (url+'/'+'query'
@@ -77,12 +77,8 @@ def download_unzip_mt_response(url, model, station, origin, verbose=True):
         +'&origintime='+str(origin.time)[:-1]
         +'&starttime='+str(origin.time)[:-1])
 
-    try:
-        dirname = os.environ['SYNGINE_CACHE']
-    except:
-        dirname = fullpath('data/greens_tensor/syngine/cache/')
-
-    path = abspath(join(dirname, str(url2uuid(url))))
+    # where the Green's functions will be cached locally
+    path = join(_check(cache_path), str(url2uuid(url)))
 
     if exists(path):
         # if unzipped directory already exists, return its absolute path
@@ -110,13 +106,13 @@ def download_unzip_mt_response(url, model, station, origin, verbose=True):
     return path
 
 
-def download_synthetics(url, model, station, origin, source):
+def download_synthetics(url, model, station, origin, source, cache_path=None):
     """ Downloads synthetics through syngine URL interface
     """
     if len(source)==6:
-        args='&sourcemomenttensor='+re.sub('\+','',",".join(map(str, source)))
+        args='&sourcemomenttensor='+re.sub(r'\+','',",".join(map(str, source)))
     elif len(source)==3:
-        args='&sourceforce='+re.sub('\+','',",".join(map(str, source)))
+        args='&sourceforce='+re.sub(r'\+','',",".join(map(str, source)))
     else:
         raise TypeError
 
@@ -133,13 +129,15 @@ def download_synthetics(url, model, station, origin, source):
         +'&starttime='+str(origin.time)[:-1])
 
     if len(source)==6:
-        url+='&sourcemomenttensor='+re.sub('\+','',",".join(map(str, source)))
+        url+='&sourcemomenttensor='+re.sub(r'\+','',",".join(map(str, source)))
     elif len(source)==3:
-        url+='&sourceforce='+re.sub('\+','',",".join(map(str, source)))
+        url+='&sourceforce='+re.sub(r'\+','',",".join(map(str, source)))
     else:
         raise TypeError
 
-    filename = fullpath('data/greens_tensor/syngine/cache/', str(url2uuid(url)))
+    # where synthetics will be cached locally
+    filename = join(_check(cache_path), str(url2uuid(url)))
+
     if exists(filename):
         return filename
     elif exists(filename+'.zip'):
@@ -204,3 +202,16 @@ def get_synthetics_syngine(url, model, station, origin, mt):
 
 
 
+def _check(path):
+    try:
+        assert path is not None
+        assert os.access(path, os.W_OK)
+    except:
+        try:
+            path = fullpath('data/greens_tensor/syngine/cache/')
+            assert os.access(path, os.W_OK)
+        except:
+            path = abspath('./syngine/cache')
+            os.makedirs(path, exist_ok=True)
+    return path
