Skip to content

Commit

Permalink
save extracted shoreline output as json not pkl
Browse files Browse the repository at this point in the history
  • Loading branch information
2320sharon committed Sep 21, 2023
1 parent f840c20 commit f66bafb
Showing 1 changed file with 25 additions and 25 deletions.
50 changes: 25 additions & 25 deletions src/coastsat/SDS_shoreline.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,29 +49,29 @@
np.seterr(all="ignore") # raise/ignore divisions by 0 and nans


# @todo remove this after testing imports
def test_imports():
    """Print the absolute paths of every file bundled in the resource packages.

    Debug-only helper used to verify that the ``models``, ``training_data``
    and ``training_sites`` package data is discoverable after installation.
    Prints one list of absolute file paths per package.
    """
    # The three packages are inspected identically, so iterate instead of
    # repeating the same print block three times.
    for package in (models, training_data, training_sites):
        print(
            [
                os.path.abspath(resource)
                for resource in importlib.resources.files(package).iterdir()
                if resource.is_file()
            ]
        )
# # @todo remove this after testing imports
# def test_imports():
# print(
# list(
# os.path.abspath(resource)
# for resource in importlib.resources.files(models).iterdir()
# if resource.is_file()
# )
# )
# print(
# list(
# os.path.abspath(resource)
# for resource in importlib.resources.files(training_data).iterdir()
# if resource.is_file()
# )
# )
# print(
# list(
# os.path.abspath(resource)
# for resource in importlib.resources.files(training_sites).iterdir()
# if resource.is_file()
# )
# )


# Main function for batch shoreline detection
Expand Down Expand Up @@ -369,8 +369,8 @@ def extract_shorelines(

# save output structure as output.json
filepath = os.path.join(filepath_data, sitename)
with open(os.path.join(filepath, sitename + "_output.pkl"), "wb") as f:
pickle.dump(output, f)
json_path = os.path.join(filepath, sitename + "_output.json")
SDS_preprocess.write_to_json(json_path, output)

return output

Expand Down

0 comments on commit f66bafb

Please sign in to comment.