Merge pull request #9 from shashank-boyapally/fmatch
GitHub Action to automate publishing of the package when a release is cut
shashank-boyapally authored Jan 16, 2024
2 parents 87528de + 17d024e commit 506f606
Showing 5 changed files with 51 additions and 10 deletions.
35 changes: 35 additions & 0 deletions .github/workflows/publish.yml
@@ -0,0 +1,35 @@
name: Publish to PyPI

on:
  release:
    types:
      - created

jobs:
  publish:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v2

      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.x
          pip-options: '--upgrade setuptools wheel'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install setuptools
          pip install build
          pip install twine
      - name: Build and publish
        run: |
          python -m build
          python -m twine upload -r pypi dist/*
        env:
          TWINE_USERNAME: __token__
          TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
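The workflow fires whenever a GitHub release is created; the checkout and Python setup steps only prepare the environment, and the two run steps do the actual work. For reference, a rough local equivalent of the Build and publish step, assuming build and twine are installed and a PyPI API token is exported as PYPI_API_TOKEN (the script is a sketch, not part of the repository):

import glob
import os
import subprocess
import sys

# Rough local equivalent of the workflow's "Build and publish" step (sketch only).
# twine reads TWINE_USERNAME / TWINE_PASSWORD from the environment, which is
# exactly how the workflow's env block feeds it the API token.
env = dict(os.environ)
env["TWINE_USERNAME"] = "__token__"
env["TWINE_PASSWORD"] = os.environ["PYPI_API_TOKEN"]  # assumed to be exported locally

# Build the sdist and wheel into dist/, then upload everything in dist/ to PyPI.
subprocess.run([sys.executable, "-m", "build"], check=True)
subprocess.run(
    [sys.executable, "-m", "twine", "upload", "-r", "pypi", *glob.glob("dist/*")],
    check=True,
    env=env,
)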
3 changes: 3 additions & 0 deletions README.md
@@ -1,2 +1,5 @@
# py-commons
Common Python Library

## Notes
For each release of the fmatch package, the version in the setup.py file at the base of the repository must be bumped first. PyPI refuses to accept a version number that has already been published, even if that release was later deleted from the index, so uploading with an unchanged version will fail.
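Because PyPI permanently reserves every version number that has been published, a small pre-flight check against the PyPI JSON API can catch a forgotten bump before the release workflow runs. A minimal sketch, assuming the package is published on PyPI as fmatch and that VERSION in setup.py is the single source of truth (the helper functions are illustrative, not part of the repository):

import json
import re
import urllib.request

def local_version(setup_path="setup.py"):
    """Read the VERSION = '...' assignment out of setup.py."""
    with open(setup_path, encoding="utf-8") as f:
        found = re.search(r"VERSION\s*=\s*['\"]([^'\"]+)['\"]", f.read())
    return found.group(1) if found else None

def published_versions(package="fmatch"):
    """Return the set of versions already released on PyPI."""
    with urllib.request.urlopen(f"https://pypi.org/pypi/{package}/json") as resp:
        return set(json.load(resp).get("releases", {}))

if __name__ == "__main__":
    version = local_version()
    if version in published_versions():
        raise SystemExit(f"fmatch {version} is already on PyPI; bump VERSION in setup.py")
    print(f"fmatch {version} is free to publish")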
16 changes: 8 additions & 8 deletions fmatch/matcher.py
@@ -22,7 +22,7 @@ class Matcher:
def __init__(self, index="perf_scale_ci", level=logging.INFO):
self.index = index
self.es_url = ES_URL
- self.searchSize = 10000
+ self.search_size = 10000
self.logger = logging.getLogger("Matcher")
self.logger.setLevel(level)
handler = logging.StreamHandler(sys.stdout)
@@ -94,7 +94,7 @@ def get_uuid_by_metadata(self, meta, index=None):
]
}
},
- "size": self.searchSize
+ "size": self.search_size
}
result = self.query_index(index, query)
hits = result.get('hits', {}).get('hits', [])
@@ -117,7 +117,7 @@ def match_k8s_netperf(self, uuids):
)
}
},
- "size": self.searchSize
+ "size": self.search_size
}
result = self.query_index(index, query)
runs = [item['_source'] for item in result["hits"]["hits"]]
@@ -141,7 +141,7 @@ def match_kube_burner(self, uuids):
)
}
},
- "size": self.searchSize
+ "size": self.search_size
}
result = self.query_index(index, query)
runs = [item['_source'] for item in result["hits"]["hits"]]
@@ -189,7 +189,7 @@ def burner_results(self, uuid, uuids, index):
)
}
},
- "size": self.searchSize
+ "size": self.search_size
}
result = self.query_index(index, query)
runs = [item['_source'] for item in result["hits"]["hits"]]
@@ -210,7 +210,7 @@ def burner_cpu_results(self, uuids, namespace, index):
"time": {
"terms": {
"field": "uuid.keyword",
- "size": self.searchSize
+ "size": self.search_size
},
"aggs": {
"time": {
@@ -222,7 +222,7 @@ def burner_cpu_results(self, uuids, namespace, index):
"uuid": {
"terms": {
"field": "uuid.keyword",
- "size": self.searchSize
+ "size": self.search_size
},
"aggs": {
"cpu": {
@@ -246,7 +246,7 @@ def burner_cpu_results(self, uuids, namespace, index):
}]
}
},
- "size": self.searchSize
+ "size": self.search_size
}
runs = self.query_index(index, query)
data = self.parse_burner_cpu_results(runs)
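All eight changed lines in matcher.py are the same rename: the camelCase searchSize attribute becomes the PEP 8 style search_size, with no change to the queries themselves. As the hunks show, the value caps both the top-level hit window and the terms-aggregation buckets; a condensed sketch of that query shape (the filter clause is illustrative, not copied from the file):

search_size = 10000  # default set in Matcher.__init__

# Shape of the Elasticsearch query bodies touched above: search_size appears
# both as the top-level "size" (maximum number of hits returned) and inside
# the terms aggregations (maximum number of uuid buckets).
query = {
    "query": {
        "bool": {"filter": [{"terms": {"uuid.keyword": ["<some-uuid>"]}}]}  # illustrative filter
    },
    "aggs": {
        "uuid": {
            "terms": {"field": "uuid.keyword", "size": search_size},
        }
    },
    "size": search_size,
}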
5 changes: 4 additions & 1 deletion fmatch/test_fmatch.py
@@ -19,6 +19,9 @@
meta['jobStatus'] = "success"
meta['ocpVersion'] = '4.15'
meta['networkType'] = "OVNKubernetes"
+ meta['encrypted'] = "true"
+ meta['ipsec'] = "false"
+ meta['fips'] = "false"

uuids = match.get_uuid_by_metadata(meta)
if len(uuids) == 0:
@@ -49,4 +49,4 @@
# Check merged csv data - Debug
for i in ls:
# Debug - Ensure they are all using the same networkType
- print(match.get_metadata_by_uuid(i)["networkType"])
+ print(match.get_metadata_by_uuid(i)['networkType'])
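The three new assignments tighten the metadata filter the smoke test passes to Matcher. Condensed, the test now effectively does the following (a sketch that mirrors the hunk above rather than a verbatim copy of the file):

from fmatch.matcher import Matcher

match = Matcher()

# Metadata filters used by the test; encrypted, ipsec and fips are the
# newly added fields.
meta = {
    "jobStatus": "success",
    "ocpVersion": "4.15",
    "networkType": "OVNKubernetes",
    "encrypted": "true",
    "ipsec": "false",
    "fips": "false",
}

uuids = match.get_uuid_by_metadata(meta)
if not uuids:
    print("No UUIDs found for the given metadata")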
2 changes: 1 addition & 1 deletion setup.py
@@ -4,7 +4,7 @@
from setuptools import setup, find_packages


- VERSION = '0.0.1'
+ VERSION = '0.0.2'
DESCRIPTION = 'Common package for matching runs with provided metadata'
# pylint: disable= line-too-long
LONG_DESCRIPTION = "A package that allows to match metadata and get runs and create csv files with queried metrics"
