Skip to content

Commit

Permalink
Initial commit after cleanup
Browse files Browse the repository at this point in the history
  • Loading branch information
Egil committed Sep 8, 2024
0 parents commit 9e62218
Show file tree
Hide file tree
Showing 69 changed files with 11,117 additions and 0 deletions.
50 changes: 50 additions & 0 deletions .github/workflows/python-app.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
# CI workflow: install the pipeline with its private git dependencies,
# run the nose2 test suite with coverage, publish JUnit/coverage badges.
name: Python application

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

jobs:
  py310:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python 3.10
        uses: actions/setup-python@v2
        with:
          python-version: "3.10"
      - name: Install dependencies
        run: |
          pip install nose2[coverage_plugin]
          pip install "git+https://${{ secrets.deployment_user }}@github.com/emerald-geomodelling/simpeg.git@simpleem3"
          pip install "git+https://${{ secrets.deployment_user }}@github.com/emerald-geomodelling/EmeraldProcessing.git"
          pip install utm
          pip install pyMKL
          pip install -e .
      - name: Test with nose2
        run: |
          nose2 --coverage-report=xml --with-coverage --plugin nose2.plugins.junitxml --junit-xml -v -s tests pipeline
      # nose2 writes a single <testsuite> root element; wrap the file in a
      # <testsuites> root so JUnit consumers (the badge action) can parse it.
      - name: JUnit version fix
        if: always()
        run: |
          {
            echo '<?xml version="1.0" encoding="UTF-8"?>'
            grep "<testsuite " nose2-junit.xml | sed -e "s+testsuite+testsuites+g";
            cat nose2-junit.xml
            echo '</testsuites>'
          } > x
          mv x nose2-junit.xml
          cat nose2-junit.xml
      - name: Generate badges
        if: always()
        uses: gaelgirodon/ci-badges-action@v1
        with:
          gist-id: 9e15232e12bf2ddf537185b43ca2060f
          token: ${{ secrets.GIST_TOKEN }}
      # Purge GitHub's camo image cache so the refreshed badges show up.
      - name: Clear badge cache
        if: always()
        shell: bash
        run: |
          urls=$(curl -sLk https://${{ secrets.deployment_user }}@github.com/${{github.repository_owner}}/${{ github.event.repository.name }}/tree/${{ steps.branch-name.outputs.current_branch }}|grep -Eo "(http|https)://camo.githubusercontent.com[a-zA-Z0-9./?=_%:-]*")
          while IFS= read -r line; do curl -X PURGE $line ; done <<< $urls
9 changes: 9 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
__pycache__
*~
*.pyc
.ipynb_checkpoints/
*.egg-info/
*.xyz
*.pickle
*.idea/
docs/data
21 changes: 21 additions & 0 deletions LICENSE
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2024 EMerald Geomodelling

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
29 changes: 29 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
![Endpoint Badge](https://img.shields.io/endpoint?url=https%3A%2F%2Fgist.githubusercontent.com%2Fredhog%2F9e15232e12bf2ddf537185b43ca2060f%2Fraw%2Fcb3a10893e74a0f97e94bcb7051c69270157fe49%2Fexperimental-pipeline-inversion-junit-tests.json)
![Endpoint Badge](https://img.shields.io/endpoint?url=https%3A%2F%2Fgist.githubusercontent.com%2Fredhog%2F9e15232e12bf2ddf537185b43ca2060f%2Fraw%2Fcb3a10893e74a0f97e94bcb7051c69270157fe49%2Fexperimental-pipeline-inversion-cobertura-coverage.json)

# emerald-beryl-pipeline

Luigi based pipeline to run inversions using [our simplified SimPEG wrapper](https://github.com/emerald-geomodelling/experimental-simpeg-ext)

To run this pipeline locally:

```
luigi --module beryl_pipeline.inversion Inversion --inversion-name=file:///some/temp/dir
```

This assumes you've copied `docs/example-real.yml` to `/some/temp/dir/config.yml`.


# Documentation

* Documentation on how to [run all the pipelines from a notebook](docs/run%20processing%20inversion%20luigi.ipynb)
* Documentation on how to [extract the API documentation](docs/run%20introspection.ipynb) used to generate the front end UI
* This should be used to test any changes / additions to e.g. processing pipeline filters

# Unit tests

To run the unit tests, first `pip install nose2` and then run

```
nose2 -s tests
```
1 change: 1 addition & 0 deletions beryl_pipeline/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
# Do NOT import everything here, as importing e.g. the inversion modules is SLOW!
165 changes: 165 additions & 0 deletions beryl_pipeline/file_import.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,165 @@
import luigi
import luigi.contrib.opener
import luigi.format
import libaarhusxyz
import yaml
import tempfile
import shutil
from . import utils
from . import localize
import poltergust_luigi_utils.caching
import poltergust_luigi_utils.logging_task
import typing
import pydantic
import importlib.metadata
import pandas as pd
import os
import copy
import slugify


# EPSG projection code. The attached json_schema metadata tags the integer
# with the "x-epsg" format so the generated front-end UI can render an
# EPSG-code picker for parameters annotated with this type.
Projection = typing.Annotated[
    int,
    {"json_schema": {
        "format": "x-epsg",
    }}]

# Pre-3.11 stand-in for typing.Self, used to annotate __init__'s self.
LibaarhusXYZImporterSelf = typing.TypeVar("Self", bound="LibaarhusXYZImporter")

class LibaarhusXYZImporter(libaarhusxyz.Survey):
    """Importer for SkyTEM data in Aarhus Workbench XYZ/GEX/ALC format.

    Loads the .xyz data file (optionally remapping columns via an .alc
    allocation file), records scale factor and projection in the model
    metadata, normalizes column naming, and pairs the data with the .gex
    system description by constructing a ``libaarhusxyz.Survey``.
    """
    json_schema = {"hide": True}  # hide this importer from generated UI lists
    api_type = "__init__"         # UI introspection reads the constructor signature

    def __init__(self: LibaarhusXYZImporterSelf,
                 files: typing.Annotated[dict, {"json_schema":
                     {"type": "object",
                      "x-format": "multi-url",
                      "description": "Required: .gex, .xyz | Optional: .alc",
                      "properties": {
                          # Raw strings: "\." must reach the JSON-schema regex
                          # engine as a literal escape, not be mangled (and
                          # warned about) as an invalid Python string escape.
                          "xyzfile": {"minLength": 5, "pattern": r"\.xyz$", "type": "string", "format": "url", "description": "The data itself"},
                          "gexfile": {"minLength": 5, "pattern": r"\.gex$", "type": "string", "format": "url", "description": "System description / calibration file"},
                          "alcfile": {"minLength": 5, "pattern": r"\.alc$", "type": "string", "format": "url", "description": "Allocation file (column name mapping)"},
                      }}}],
                 scalefactor = 1e-12,
                 projection: Projection = None):
        """Import SkyTEM data

        Parameters
        ----------
        scalefactor :
            Data unit, 1 = volt, 1e-12 = picovolt
        projection :
            EPSG code for the projection and chart datum of sounding locations
        """
        xyzfile = files.get("xyzfile")
        gexfile = files.get("gexfile")
        alcfile = files.get("alcfile")

        assert isinstance(projection, int) and projection > 0, "Invalid projection, please provide a valid projection"
        # Accept int as well as float: the documented "1 = volt" value is
        # naturally written as an int in YAML configs (the original
        # float-only check rejected it).
        assert isinstance(scalefactor, (int, float)) and scalefactor != 0, "Invalid scalefactor, please provide a valid scalefactor"
        assert xyzfile is not None, "Missing xyz file"
        assert gexfile is not None, "Missing gex file"

        xyz = libaarhusxyz.XYZ(xyzfile, alcfile=alcfile)
        if scalefactor:
            xyz.model_info['scalefactor'] = scalefactor
        if projection:
            xyz.model_info['projection'] = projection
        xyz.normalize(naming_standard="alc")

        assert "projection" in xyz.model_info
        assert "scalefactor" in xyz.model_info

        # Check for None only to support inversion_workbench_import
        gex = libaarhusxyz.GEX(gexfile) if gexfile is not None else None

        libaarhusxyz.Survey.__init__(self, xyz, gex)

importers = {entry.name: entry for entry in importlib.metadata.entry_points()["beryl_pipeline.import"]}

class Import(poltergust_luigi_utils.logging_task.LoggingTask, luigi.Task):
    """Luigi task that runs a configured importer plugin over raw survey files.

    Reads ``<import_name>/config.yml``, localizes the referenced input files,
    instantiates the importer named in the config (looked up in ``importers``),
    and uploads the normalized outputs (.xyz/.gex/.msgpack/summary/geojson) —
    once for the whole survey and once per flight line. All inputs and outputs
    live under the ``import_name`` URL prefix.
    """
    # URL/directory prefix under which the config, log and outputs live.
    import_name = luigi.Parameter()
    # Tell LoggingTask to format the log file as YAML.
    logging_formatter_yaml = True

    def __init__(self, *arg, **kw):
        luigi.Task.__init__(self, *arg, **kw)
        # Accumulated log messages; written out by the logging machinery.
        self._log = []

    def config_target(self):
        # YAML config naming the importer plugin and its constructor args.
        return poltergust_luigi_utils.caching.CachingOpenerTarget('%s/config.yml' % (self.import_name,))

    def run(self):
        with self.logging():
            self.log("Read config")

            with self.config_target().open("r") as f:
                config = yaml.load(f, Loader=yaml.SafeLoader)

            self.log("Download files")

            # localize() rewrites remote URLs in the config to local copies;
            # upload_directory() gives a temp dir whose contents are uploaded
            # to import_name when the context exits.
            with localize.localize(config) as config:
                with localize.upload_directory(self.import_name) as tempdir:
                    self.log("Import data")

                    # Resolve the plugin entry point named in the config and
                    # call it with the configured keyword arguments.
                    importer_fn = importers[config["importer"]["name"]].load()
                    importer = importer_fn(**config["importer"].get("args", {}))

                    self.log("Write and upload data")
                    # Whole-survey outputs.
                    importer.dump(
                        xyzfile = '%s/out.xyz' % (tempdir,),
                        gexfile = '%s/out.gex' % (tempdir,),
                        msgpackfile = '%s/out.msgpack' % (tempdir,),
                        summaryfile = '%s/out.summary.yml' % (tempdir,),
                        geojsonfile = '%s/out.geojson' % (tempdir,))

                    # Per-flight-line outputs: shallow-copy the importer and
                    # swap in each line's data; slugify the line id so it is
                    # safe to embed in a filename.
                    for fline, line_data in importer.xyz.split_by_line().items():
                        fline = slugify.slugify(str(fline), separator="_")
                        line_importer = copy.copy(importer)
                        line_importer.xyz = line_data
                        line_importer.dump(
                            xyzfile = '%s/out.%s.xyz' % (tempdir, fline),
                            gexfile = '%s/out.%s.gex' % (tempdir, fline),
                            msgpackfile = '%s/out.%s.msgpack' % (tempdir, fline),
                            summaryfile = '%s/out.%s.summary.yml' % (tempdir, fline),
                            geojsonfile = '%s/out.%s.geojson' % (tempdir, fline))

            # Marker file: its existence makes output() complete.
            with self.output().open("w") as f:
                f.write("DONE")

    def logfile(self):
        # YAML log target used by LoggingTask.
        return poltergust_luigi_utils.caching.CachingOpenerTarget(
            '%s/log.yml' % (self.import_name,))

    def system_data(self):
        # Imported system description (.gex); binary, hence NopFormat.
        return poltergust_luigi_utils.caching.CachingOpenerTarget(
            '%s/out.gex' % (self.import_name,),
            format=luigi.format.NopFormat())

    def data(self):
        # Imported whole-survey data (.xyz).
        return poltergust_luigi_utils.caching.CachingOpenerTarget(
            '%s/out.xyz' % (self.import_name,),
            format=luigi.format.NopFormat())

    def data_msgpack(self):
        # Imported whole-survey data in msgpack form.
        return poltergust_luigi_utils.caching.CachingOpenerTarget(
            '%s/out.msgpack' % (self.import_name,),
            format=luigi.format.NopFormat())

    def summary(self):
        # Whole-survey summary YAML.
        return poltergust_luigi_utils.caching.CachingOpenerTarget(
            '%s/out.summary.yml' % (self.import_name,))

    def fl_data(self, fline):
        # Per-flight-line data (.xyz); fline is the slugified line id.
        return poltergust_luigi_utils.caching.CachingOpenerTarget(
            f'{self.import_name}/out.{fline}.xyz',
            format=luigi.format.NopFormat())

    def fl_data_msgpack(self, fline):
        # Per-flight-line data in msgpack form.
        return poltergust_luigi_utils.caching.CachingOpenerTarget(
            f'{self.import_name}/out.{fline}.msgpack',
            format=luigi.format.NopFormat())

    def fl_summary(self, fline):
        # Per-flight-line summary YAML.
        return poltergust_luigi_utils.caching.CachingOpenerTarget(
            f'{self.import_name}/out.{fline}.summary.yml')

    def output(self):
        # Completion marker written at the end of run().
        return poltergust_luigi_utils.caching.CachingOpenerTarget('%s/DONE' % (self.import_name,))
92 changes: 92 additions & 0 deletions beryl_pipeline/integration.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
import luigi
import poltergust_luigi_utils.caching
import yaml
from . import file_import
from . import processing
from . import inversion

class IntegrationStep(luigi.Task):
    """One stage of the import -> processing -> inversion chain.

    ``step`` is one of "import", "processing" or "inversion". Each step
    requires the previous one, so depending on the "inversion" step pulls
    in the whole chain.
    """
    integration_name = luigi.Parameter()  # URL/directory prefix shared by all steps
    step = luigi.Parameter()              # which stage this task represents

    def requires(self):
        # Chain the steps: inversion <- processing <- import <- nothing.
        if self.step == "inversion":
            return IntegrationStep(
                integration_name=self.integration_name,
                step="processing")
        elif self.step == "processing":
            return IntegrationStep(
                integration_name=self.integration_name,
                step="import")
        else:
            return None

    def run(self):
        # Dynamic dependency: yield the task that does the actual work.
        yield self.subtask()

    def subtask(self):
        """Map this step name to the concrete work task for it."""
        if self.step == "inversion":
            return inversion.Inversion(inversion_name=self.integration_name + "/inversion")
        elif self.step == "processing":
            return processing.Processing(processing_name=self.integration_name + "/processing")
        elif self.step == "import":
            return file_import.Import(import_name=self.integration_name + "/import")
        # raise instead of assert: must still fail on a bad step value under
        # ``python -O``, which strips assert statements.
        raise ValueError('Unknown step %s' % (self.step,))

    def output(self):
        # This step is complete when its work task is complete.
        return self.subtask().output()

class Integration(luigi.Task):
    """Top-level task running the full import -> processing -> inversion chain.

    Splits the combined ``<integration_name>/config.yml`` into per-step
    configs, injecting each step's input URLs from the previous step's
    outputs, then yields the chain as a dynamic dependency.
    """
    integration_name = luigi.Parameter()

    def config_target(self):
        # Combined YAML config with "importer", "processing" and "inversion" sections.
        return poltergust_luigi_utils.caching.CachingOpenerTarget('%s/config.yml' % (self.integration_name,))

    def subtask(self):
        # Final step of the chain; its requires() pulls in the earlier steps.
        return IntegrationStep(
            integration_name=self.integration_name,
            step="inversion")

    def run(self):
        subtask = self.subtask()
        # Walk the chain to get the concrete work task of each step.
        inv = subtask.subtask()
        pro = subtask.requires().subtask()
        imp = subtask.requires().requires().subtask()

        with self.config_target().open("r") as f:
            config = yaml.load(f, Loader=yaml.SafeLoader)

        # (debug banner prints removed — they spammed stdout on every run)

        with imp.config_target().open("w") as f:
            yaml.dump(config["importer"], f)

        with pro.config_target().open("w") as f:
            # Processing reads the importer's outputs.
            config["processing"]["data"] = {
                "name": "emeraldprocessing.pipeline.ProcessingData",
                "args": {
                    "data": imp.data().url,
                    "sidecar": None,
                    "system_data": imp.system_data().url,
                }
            }
            yaml.dump(config["processing"], f)

        with inv.config_target().open("w") as f:
            # Inversion reads the processing step's outputs.
            config["inversion"]["data"] = pro.data().url
            config["inversion"]["system_data"] = pro.system_data().url
            yaml.dump(config["inversion"], f)

        # Configs are in place; now run the chain as a dynamic dependency.
        yield subtask

    def output(self):
        return self.subtask().output()
Loading

0 comments on commit 9e62218

Please sign in to comment.