From f7ad610c3be0e94441204401ce86e04c1d1a3257 Mon Sep 17 00:00:00 2001 From: Evening Date: Wed, 20 Dec 2023 13:39:52 +0800 Subject: [PATCH 01/35] Move common functions to utils --- tests/model_tests/chestnut_dec_may/train.py | 157 ++------------------ tests/model_tests/utils.py | 135 +++++++++++++++++ 2 files changed, 151 insertions(+), 141 deletions(-) diff --git a/tests/model_tests/chestnut_dec_may/train.py b/tests/model_tests/chestnut_dec_may/train.py index de390e6d..299eb708 100644 --- a/tests/model_tests/chestnut_dec_may/train.py +++ b/tests/model_tests/chestnut_dec_may/train.py @@ -9,7 +9,6 @@ import lightning as pl import numpy as np -import torch import wandb from lightning.pytorch.callbacks import ( LearningRateMonitor, @@ -17,150 +16,19 @@ EarlyStopping, ) from lightning.pytorch.loggers import WandbLogger -from matplotlib import pyplot as plt -from seaborn import heatmap -from sklearn.metrics import confusion_matrix from sklearn.preprocessing import StandardScaler, OrdinalEncoder -from torch.utils.data import DataLoader -from torchvision.transforms import RandomVerticalFlip -from torchvision.transforms.v2 import ( - Compose, - ToImage, - ToDtype, - RandomVerticalFlip, - RandomCrop, - CenterCrop, -) -from torchvision.transforms.v2 import RandomHorizontalFlip from frdc.load import FRDCDataset from frdc.load.dataset import FRDCUnlabelledDataset from frdc.models.inceptionv3 import InceptionV3MixMatchModule from frdc.train.frdc_datamodule import FRDCDataModule - -THIS_DIR = Path(__file__).parent - - -# TODO: Ideally, we should have a separate dataset for testing. - - -# TODO: This is pretty hacky, I'm not sure if there's a better way to do this. -# Note that initializing datasets separately then concatenating them -# together is 4x slower than initializing a dataset then hacking into -# the __getitem__ method. -class FRDCDatasetFlipped(FRDCDataset): - def __len__(self): - """Assume that the dataset is 4x larger than it actually is. - - For example, for index 0, we return the original image. For index 1, we - return the horizontally flipped image and so on, until index 3. - Then, return the next image for index 4, and so on. 
- """ - return super().__len__() * 4 - - def __getitem__(self, idx): - """Alter the getitem method to implement the logic above.""" - x, y = super().__getitem__(int(idx // 4)) - if idx % 4 == 0: - return x, y - elif idx % 4 == 1: - return RandomHorizontalFlip(p=1)(x), y - elif idx % 4 == 2: - return RandomVerticalFlip(p=1)(x), y - elif idx % 4 == 3: - return RandomHorizontalFlip(p=1)(RandomVerticalFlip(p=1)(x)), y - - -def evaluate(ckpt_pth: Path | str | None = None) -> tuple[plt.Figure, float]: - ds = FRDCDatasetFlipped( - "chestnut_nature_park", - "20210510", - "90deg43m85pct255deg", - transform=preprocess, - ) - - if ckpt_pth is None: - # This fetches all possible checkpoints and gets the latest one - ckpt_pth = sorted( - THIS_DIR.glob("**/*.ckpt"), key=lambda x: x.stat().st_mtime_ns - )[-1] - - m = InceptionV3MixMatchModule.load_from_checkpoint(ckpt_pth) - # Make predictions - trainer = pl.Trainer(logger=False) - pred = trainer.predict(m, dataloaders=DataLoader(ds, batch_size=32)) - - y_trues = [] - y_preds = [] - for y_true, y_pred in pred: - y_trues.append(y_true) - y_preds.append(y_pred) - y_trues = np.concatenate(y_trues) - y_preds = np.concatenate(y_preds) - acc = (y_trues == y_preds).mean() - - # Plot the confusion matrix - cm = confusion_matrix(y_trues, y_preds) - - plt.figure(figsize=(10, 10)) - - heatmap( - cm, - annot=True, - xticklabels=m.y_encoder.categories_[0], - yticklabels=m.y_encoder.categories_[0], - cbar=False, - ) - plt.title(f"Accuracy: {acc:.2%}") - plt.tight_layout(pad=3) - plt.xlabel("Predicted Label") - plt.ylabel("True Label") - return plt.gcf(), acc - - -def preprocess(x): - return Compose( - [ - ToImage(), - ToDtype(torch.float32, scale=True), - CenterCrop( - [ - InceptionV3MixMatchModule.MIN_SIZE, - InceptionV3MixMatchModule.MIN_SIZE, - ], - ), - ] - )(x) - - -def train_preprocess(x): - return Compose( - [ - ToImage(), - ToDtype(torch.float32, scale=True), - RandomCrop( - [ - InceptionV3MixMatchModule.MIN_SIZE, - InceptionV3MixMatchModule.MIN_SIZE, - ], - pad_if_needed=True, - padding_mode="constant", - fill=0, - ), - RandomHorizontalFlip(), - RandomVerticalFlip(), - ] - )(x) - - -def train_unl_preprocess(n_aug: int = 2): - def f(x): - # This simulates the n_aug of MixMatch - return ( - [train_preprocess(x) for _ in range(n_aug)] if n_aug > 0 else None - ) - - return f +from model_tests.utils import ( + train_preprocess, + train_unl_preprocess, + preprocess, + evaluate, + FRDCDatasetFlipped, +) def main( @@ -250,7 +118,15 @@ def main( f"[WandB Report]({run.get_url()})" ) - fig, acc = evaluate(Path(ckpt.best_model_path)) + fig, acc = evaluate( + ds=FRDCDatasetFlipped( + "chestnut_nature_park", + "20210510", + "90deg43m85pct255deg", + transform=preprocess, + ), + ckpt_pth=Path(ckpt.best_model_path), + ) wandb.log({"confusion_matrix": wandb.Image(fig)}) wandb.log({"eval_accuracy": acc}) @@ -263,7 +139,6 @@ def main( TRAIN_ITERS = 25 VAL_ITERS = 15 LR = 1e-3 - os.environ["GOOGLE_CLOUD_PROJECT"] = "frmodel" assert wandb.run is None wandb.setup(wandb.Settings(program=__name__, program_relpath=__name__)) diff --git a/tests/model_tests/utils.py b/tests/model_tests/utils.py index 22640115..593bae11 100644 --- a/tests/model_tests/utils.py +++ b/tests/model_tests/utils.py @@ -1 +1,136 @@ +from pathlib import Path + +import lightning as pl +import numpy as np +import torch +from matplotlib import pyplot as plt +from seaborn import heatmap +from sklearn.metrics import confusion_matrix +from torch.utils.data import DataLoader +from torchvision.transforms import 
RandomVerticalFlip +from torchvision.transforms.v2 import ( + Compose, + ToImage, + ToDtype, + RandomVerticalFlip, + RandomCrop, + CenterCrop, +) +from torchvision.transforms.v2 import RandomHorizontalFlip + +from frdc.load import FRDCDataset +from frdc.models.inceptionv3 import InceptionV3MixMatchModule + +THIS_DIR = Path(__file__).parent + BANDS = ["NB", "NG", "NR", "RE", "NIR"] + + +class FRDCDatasetFlipped(FRDCDataset): + def __len__(self): + """Assume that the dataset is 4x larger than it actually is. + + For example, for index 0, we return the original image. For index 1, we + return the horizontally flipped image and so on, until index 3. + Then, return the next image for index 4, and so on. + """ + return super().__len__() * 4 + + def __getitem__(self, idx): + """Alter the getitem method to implement the logic above.""" + x, y = super().__getitem__(int(idx // 4)) + if idx % 4 == 0: + return x, y + elif idx % 4 == 1: + return RandomHorizontalFlip(p=1)(x), y + elif idx % 4 == 2: + return RandomVerticalFlip(p=1)(x), y + elif idx % 4 == 3: + return RandomHorizontalFlip(p=1)(RandomVerticalFlip(p=1)(x)), y + + +def evaluate( + ds: FRDCDataset, ckpt_pth: Path | str | None = None +) -> tuple[plt.Figure, float]: + if ckpt_pth is None: + # This fetches all possible checkpoints and gets the latest one + ckpt_pth = sorted( + THIS_DIR.glob("**/*.ckpt"), key=lambda x: x.stat().st_mtime_ns + )[-1] + + m = InceptionV3MixMatchModule.load_from_checkpoint(ckpt_pth) + # Make predictions + trainer = pl.Trainer(logger=False) + pred = trainer.predict(m, dataloaders=DataLoader(ds, batch_size=32)) + + y_trues = [] + y_preds = [] + for y_true, y_pred in pred: + y_trues.append(y_true) + y_preds.append(y_pred) + y_trues = np.concatenate(y_trues) + y_preds = np.concatenate(y_preds) + acc = (y_trues == y_preds).mean() + + # Plot the confusion matrix + cm = confusion_matrix(y_trues, y_preds) + + plt.figure(figsize=(10, 10)) + + heatmap( + cm, + annot=True, + xticklabels=m.y_encoder.categories_[0], + yticklabels=m.y_encoder.categories_[0], + cbar=False, + ) + plt.title(f"Accuracy: {acc:.2%}") + plt.tight_layout(pad=3) + plt.xlabel("Predicted Label") + plt.ylabel("True Label") + return plt.gcf(), acc + + +def preprocess(x): + return Compose( + [ + ToImage(), + ToDtype(torch.float32, scale=True), + CenterCrop( + [ + InceptionV3MixMatchModule.MIN_SIZE, + InceptionV3MixMatchModule.MIN_SIZE, + ], + ), + ] + )(x) + + +def train_preprocess(x): + return Compose( + [ + ToImage(), + ToDtype(torch.float32, scale=True), + RandomCrop( + [ + InceptionV3MixMatchModule.MIN_SIZE, + InceptionV3MixMatchModule.MIN_SIZE, + ], + pad_if_needed=True, + padding_mode="constant", + fill=0, + ), + RandomHorizontalFlip(), + RandomVerticalFlip(), + ] + )(x) + + +def train_unl_preprocess(n_aug: int = 2): + def f(x): + # This simulates the n_aug of MixMatch + return ( + [train_preprocess(x) for _ in range(n_aug)] if n_aug > 0 else None + ) + + return f From 01b9849098838cc3e824543370316272a446e38d Mon Sep 17 00:00:00 2001 From: Evening Date: Wed, 20 Dec 2023 13:40:00 +0800 Subject: [PATCH 02/35] Suppress gcs environ warning --- src/frdc/conf.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/frdc/conf.py b/src/frdc/conf.py index bb3716c1..c94806fe 100644 --- a/src/frdc/conf.py +++ b/src/frdc/conf.py @@ -9,6 +9,7 @@ ROOT_DIR = Path(__file__).parents[2] LOCAL_DATASET_ROOT_DIR = ROOT_DIR / "rsc" +os.environ["GOOGLE_CLOUD_PROJECT"] = "frmodel" GCS_PROJECT_ID = "frmodel" GCS_BUCKET_NAME = "frdc-ds" GCS_CREDENTIALS = None From 
643a98d88417a75037f65cb5de8b2955dabef677 Mon Sep 17 00:00:00 2001 From: Evening Date: Wed, 20 Dec 2023 14:02:25 +0800 Subject: [PATCH 03/35] Fix missing label-studio-sdk --- poetry.lock | 647 +++++++++++++++++++++++++++++++++++-------------- pyproject.toml | 1 + 2 files changed, 471 insertions(+), 177 deletions(-) diff --git a/poetry.lock b/poetry.lock index 39ed7d6a..5775a26d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,16 @@ # This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +[[package]] +name = "annotated-types" +version = "0.6.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] + [[package]] name = "appdirs" version = "1.4.4" @@ -13,29 +24,33 @@ files = [ [[package]] name = "black" -version = "23.11.0" +version = "23.12.0" description = "The uncompromising code formatter." optional = false python-versions = ">=3.8" files = [ - {file = "black-23.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dbea0bb8575c6b6303cc65017b46351dc5953eea5c0a59d7b7e3a2d2f433a911"}, - {file = "black-23.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:412f56bab20ac85927f3a959230331de5614aecda1ede14b373083f62ec24e6f"}, - {file = "black-23.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d136ef5b418c81660ad847efe0e55c58c8208b77a57a28a503a5f345ccf01394"}, - {file = "black-23.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:6c1cac07e64433f646a9a838cdc00c9768b3c362805afc3fce341af0e6a9ae9f"}, - {file = "black-23.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf57719e581cfd48c4efe28543fea3d139c6b6f1238b3f0102a9c73992cbb479"}, - {file = "black-23.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:698c1e0d5c43354ec5d6f4d914d0d553a9ada56c85415700b81dc90125aac244"}, - {file = "black-23.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760415ccc20f9e8747084169110ef75d545f3b0932ee21368f63ac0fee86b221"}, - {file = "black-23.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:58e5f4d08a205b11800332920e285bd25e1a75c54953e05502052738fe16b3b5"}, - {file = "black-23.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:45aa1d4675964946e53ab81aeec7a37613c1cb71647b5394779e6efb79d6d187"}, - {file = "black-23.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c44b7211a3a0570cc097e81135faa5f261264f4dfaa22bd5ee2875a4e773bd6"}, - {file = "black-23.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a9acad1451632021ee0d146c8765782a0c3846e0e0ea46659d7c4f89d9b212b"}, - {file = "black-23.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc7f6a44d52747e65a02558e1d807c82df1d66ffa80a601862040a43ec2e3142"}, - {file = "black-23.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7f622b6822f02bfaf2a5cd31fdb7cd86fcf33dab6ced5185c35f5db98260b055"}, - {file = "black-23.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:250d7e60f323fcfc8ea6c800d5eba12f7967400eb6c2d21ae85ad31c204fb1f4"}, - {file = "black-23.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5133f5507007ba08d8b7b263c7aa0f931af5ba88a29beacc4b2dc23fcefe9c06"}, - {file = "black-23.11.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:421f3e44aa67138ab1b9bfbc22ee3780b22fa5b291e4db8ab7eee95200726b07"}, - {file = "black-23.11.0-py3-none-any.whl", hash = "sha256:54caaa703227c6e0c87b76326d0862184729a69b73d3b7305b6288e1d830067e"}, - {file = "black-23.11.0.tar.gz", hash = "sha256:4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05"}, + {file = "black-23.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67f19562d367468ab59bd6c36a72b2c84bc2f16b59788690e02bbcb140a77175"}, + {file = "black-23.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bbd75d9f28a7283b7426160ca21c5bd640ca7cd8ef6630b4754b6df9e2da8462"}, + {file = "black-23.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:593596f699ca2dcbbbdfa59fcda7d8ad6604370c10228223cd6cf6ce1ce7ed7e"}, + {file = "black-23.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:12d5f10cce8dc27202e9a252acd1c9a426c83f95496c959406c96b785a92bb7d"}, + {file = "black-23.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e73c5e3d37e5a3513d16b33305713237a234396ae56769b839d7c40759b8a41c"}, + {file = "black-23.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ba09cae1657c4f8a8c9ff6cfd4a6baaf915bb4ef7d03acffe6a2f6585fa1bd01"}, + {file = "black-23.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace64c1a349c162d6da3cef91e3b0e78c4fc596ffde9413efa0525456148873d"}, + {file = "black-23.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:72db37a2266b16d256b3ea88b9affcdd5c41a74db551ec3dd4609a59c17d25bf"}, + {file = "black-23.12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fdf6f23c83078a6c8da2442f4d4eeb19c28ac2a6416da7671b72f0295c4a697b"}, + {file = "black-23.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39dda060b9b395a6b7bf9c5db28ac87b3c3f48d4fdff470fa8a94ab8271da47e"}, + {file = "black-23.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7231670266ca5191a76cb838185d9be59cfa4f5dd401b7c1c70b993c58f6b1b5"}, + {file = "black-23.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:193946e634e80bfb3aec41830f5d7431f8dd5b20d11d89be14b84a97c6b8bc75"}, + {file = "black-23.12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bcf91b01ddd91a2fed9a8006d7baa94ccefe7e518556470cf40213bd3d44bbbc"}, + {file = "black-23.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:996650a89fe5892714ea4ea87bc45e41a59a1e01675c42c433a35b490e5aa3f0"}, + {file = "black-23.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdbff34c487239a63d86db0c9385b27cdd68b1bfa4e706aa74bb94a435403672"}, + {file = "black-23.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:97af22278043a6a1272daca10a6f4d36c04dfa77e61cbaaf4482e08f3640e9f0"}, + {file = "black-23.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ead25c273adfad1095a8ad32afdb8304933efba56e3c1d31b0fee4143a1e424a"}, + {file = "black-23.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c71048345bdbced456cddf1622832276d98a710196b842407840ae8055ade6ee"}, + {file = "black-23.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a832b6e00eef2c13b3239d514ea3b7d5cc3eaa03d0474eedcbbda59441ba5d"}, + {file = "black-23.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:6a82a711d13e61840fb11a6dfecc7287f2424f1ca34765e70c909a35ffa7fb95"}, + {file = "black-23.12.0-py3-none-any.whl", hash = "sha256:a7c07db8200b5315dc07e331dda4d889a56f6bf4db6a9c2a526fa3166a81614f"}, + {file = "black-23.12.0.tar.gz", hash = "sha256:330a327b422aca0634ecd115985c1c7fd7bdb5b5a2ef8aa9888a82e2ebe9437a"}, ] [package.dependencies] @@ -49,7 
+64,7 @@ typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] @@ -275,31 +290,34 @@ test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"] [[package]] name = "cupy-cuda12x" -version = "12.2.0" +version = "12.3.0" description = "CuPy: NumPy & SciPy for GPU" optional = false python-versions = ">=3.8" files = [ - {file = "cupy_cuda12x-12.2.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:350cc1591d5af25aaf147974547a68f25eb9104b0fcd5fa3c89f32f4d42b88c7"}, - {file = "cupy_cuda12x-12.2.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:bfcea96e5506193ea8672a8c8a3e164d023c4860e58f1165cdd4a946b136aa20"}, - {file = "cupy_cuda12x-12.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:9a41ed8bece4dc2344e2afb1976690adf7ad3f9ef0a169653b5c9466e4768450"}, - {file = "cupy_cuda12x-12.2.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:3e86fe1d41009418d3f2878e6f4f713a28d29a7faaa47c089f8ac05851087e9e"}, - {file = "cupy_cuda12x-12.2.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:ddf743881d85e98e1ac46328f78100a5abe842793aa1fd575301c81df591e9a2"}, - {file = "cupy_cuda12x-12.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:77386c53ddb5040f5cd9daa6764e3e3edc76f71b074b9a9bebec76f5da75cfa8"}, - {file = "cupy_cuda12x-12.2.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:98277c47425cb59cb623fcd94fec4dfc77292ff1377f2fc4bd0d0e55c7dcf447"}, - {file = "cupy_cuda12x-12.2.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:c581705d444cdeeffa016055ba449322bb2a99b5416ab5b85f140ea7333a1e7c"}, - {file = "cupy_cuda12x-12.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:12d88bba2e6cae18ba48eabbb7ff23a21d073ce83047ef27a87b99414db86795"}, - {file = "cupy_cuda12x-12.2.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:7d4e2b2ad37afd163d006a96b31b417142d95768846227513af7b596d731ba29"}, - {file = "cupy_cuda12x-12.2.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:0406b98fb2f1780238de8fed0da5f14e689b016c5c1f0ddaecd41ee987cd7965"}, - {file = "cupy_cuda12x-12.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:2ee5cb963bab52cc421ba09824e0ffdb7c6a394f35884094f73d2d1af927f0e4"}, + {file = "cupy_cuda12x-12.3.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:77687b019383eb89da981638cfec6d5ba556a15f1be0945e8f7f80030b851d04"}, + {file = "cupy_cuda12x-12.3.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:32d0e03789ef3f02f0c098818e957c235b75c1636e9e0036299480db0c423dcd"}, + {file = "cupy_cuda12x-12.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:6ab0d67db0ae74118b3b29c96cd79428593ee646c1e7b4f92ad76028033d3646"}, + {file = "cupy_cuda12x-12.3.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:e8a4906da820f6ce39a3a1d18c4504da4e0faad87598761ea9d6bf8288423d69"}, + {file = "cupy_cuda12x-12.3.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:49626a975c87008a11b67a43ca4b5bd00c3029e093430511c83cbda422b6a89f"}, + {file = "cupy_cuda12x-12.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:182fed8568f60208bec43b8ad011f4ab2a80f5bc428bd34672b9ef130a93f772"}, + {file = "cupy_cuda12x-12.3.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:43c54e121d9fdc92b246df438bc3f49d47a85c1562deefc9f5e16e55c27cec6a"}, + {file = "cupy_cuda12x-12.3.0-cp312-cp312-manylinux2014_x86_64.whl", hash = 
"sha256:3e65f1cfd32fa5d9894681de35e57a79a8bf74e8a84e6ee3d24542ba71aaca34"}, + {file = "cupy_cuda12x-12.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:c7282a51a6e74a2221285b44eb6deefac96965016b7eb4387903e5cee059bcb7"}, + {file = "cupy_cuda12x-12.3.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:5dea10a66a0e208f83ef80fca0e57f6ed227b93c138a65c6d23ff2d8350a3a33"}, + {file = "cupy_cuda12x-12.3.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:27993b46a1dd5a6b2c269ab436e0927b5eb66adb03d24b9b321850a1b4140d7a"}, + {file = "cupy_cuda12x-12.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:8fe596e792ef47bab71cc85fe0982d752357276d6eb701cfc823697d29e83257"}, + {file = "cupy_cuda12x-12.3.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:716db7567453a88795c1e157f4e9059d291f759e01d7c18df65feafb02209b31"}, + {file = "cupy_cuda12x-12.3.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:1ac1312fa4f24598bced9ddd6b2cd66236c53171e9941c5ee170c5b14c7f0e3a"}, + {file = "cupy_cuda12x-12.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:3c7ba97dcb78f8b65ff7c179eacde3e3f2cd895d5fd5041c5617e717d15b2e76"}, ] [package.dependencies] fastrlock = ">=0.5" -numpy = ">=1.20,<1.27" +numpy = ">=1.20,<1.29" [package.extras] -all = ["Cython (>=0.29.22,<3)", "optuna (>=2.0)", "scipy (>=1.6,<1.13)"] +all = ["Cython (>=0.29.22,<3)", "optuna (>=2.0)", "scipy (>=1.6,<1.14)"] stylecheck = ["autopep8 (==1.5.5)", "flake8 (==3.8.4)", "mypy (==1.4.1)", "pbr (==5.5.1)", "pycodestyle (==2.6.0)", "types-setuptools (==57.4.14)"] test = ["hypothesis (>=6.37.2,<6.55.0)", "pytest (>=7.2)"] @@ -320,13 +338,13 @@ tests = ["pytest", "pytest-cov", "pytest-xdist"] [[package]] name = "distlib" -version = "0.3.7" +version = "0.3.8" description = "Distribution utilities" optional = false python-versions = "*" files = [ - {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, - {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, ] [[package]] @@ -475,59 +493,59 @@ pyflakes = ">=3.1.0,<3.2.0" [[package]] name = "fonttools" -version = "4.46.0" +version = "4.47.0" description = "Tools to manipulate font files" optional = false python-versions = ">=3.8" files = [ - {file = "fonttools-4.46.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d4e69e2c7f93b695d2e6f18f709d501d945f65c1d237dafaabdd23cd935a5276"}, - {file = "fonttools-4.46.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:25852f0c63df0af022f698464a4a80f7d1d5bd974bcd22f995f6b4ad198e32dd"}, - {file = "fonttools-4.46.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:adab73618d0a328b203a0e242b3eba60a2b5662d9cb2bd16ed9c52af8a7d86af"}, - {file = "fonttools-4.46.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2cf923a4a556ab4cc4c52f69a4a2db624cf5a2cf360394368b40c5152fe3321e"}, - {file = "fonttools-4.46.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:87c214197712cc14fd2a4621efce2a9c501a77041232b789568149a8a3161517"}, - {file = "fonttools-4.46.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:156ae342a1ed1fe38e180de471e98fbf5b2b6ae280fa3323138569c4ca215844"}, - {file = "fonttools-4.46.0-cp310-cp310-win32.whl", hash = 
"sha256:c506e3d3a9e898caee4dc094f34b49c5566870d5a2d1ca2125f0a9f35ecc2205"}, - {file = "fonttools-4.46.0-cp310-cp310-win_amd64.whl", hash = "sha256:f8bc3973ed58893c4107993e0a7ae34901cb572b5e798249cbef35d30801ffd4"}, - {file = "fonttools-4.46.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:982f69855ac258260f51048d9e0c53c5f19881138cc7ca06deb38dc4b97404b6"}, - {file = "fonttools-4.46.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c23c59d321d62588620f2255cf951270bf637d88070f38ed8b5e5558775b86c"}, - {file = "fonttools-4.46.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0e94244ec24a940ecfbe5b31c975c8a575d5ed2d80f9a280ce3b21fa5dc9c34"}, - {file = "fonttools-4.46.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a9f9cdd7ef63d1b8ac90db335762451452426b3207abd79f60da510cea62da5"}, - {file = "fonttools-4.46.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ca9eceebe70035b057ce549e2054cad73e95cac3fe91a9d827253d1c14618204"}, - {file = "fonttools-4.46.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8be6adfa4e15977075278dd0a0bae74dec59be7b969b5ceed93fb86af52aa5be"}, - {file = "fonttools-4.46.0-cp311-cp311-win32.whl", hash = "sha256:7b5636f5706d49f13b6d610fe54ee662336cdf56b5a6f6683c0b803e23d826d2"}, - {file = "fonttools-4.46.0-cp311-cp311-win_amd64.whl", hash = "sha256:49ea0983e55fd7586a809787cd4644a7ae471e53ab8ddc016f9093b400e32646"}, - {file = "fonttools-4.46.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7b460720ce81773da1a3e7cc964c48e1e11942b280619582a897fa0117b56a62"}, - {file = "fonttools-4.46.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8bee9f4fc8c99824a424ae45c789ee8c67cb84f8e747afa7f83b7d3cef439c3b"}, - {file = "fonttools-4.46.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3d7b96aba96e05e8c911ce2dfc5acc6a178b8f44f6aa69371ab91aa587563da"}, - {file = "fonttools-4.46.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e6aeb5c340416d11a3209d75c48d13e72deea9e1517837dd1522c1fd1f17c11"}, - {file = "fonttools-4.46.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c779f8701deedf41908f287aeb775b8a6f59875ad1002b98ac6034ae4ddc1b7b"}, - {file = "fonttools-4.46.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce199227ce7921eaafdd4f96536f16b232d6b580ce74ce337de544bf06cb2752"}, - {file = "fonttools-4.46.0-cp312-cp312-win32.whl", hash = "sha256:1c9937c4dd1061afd22643389445fabda858af5e805860ec3082a4bc07c7a720"}, - {file = "fonttools-4.46.0-cp312-cp312-win_amd64.whl", hash = "sha256:a9fa52ef8fd14d7eb3d813e1451e7ace3e1eebfa9b7237d3f81fee8f3de6a114"}, - {file = "fonttools-4.46.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c94564b1f3b5dd87e73577610d85115b1936edcc596deaf84a31bbe70e17456b"}, - {file = "fonttools-4.46.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4a50a1dfad7f7ba5ca3f99cc73bf5cdac67ceade8e4b355a877521f20ad1b63"}, - {file = "fonttools-4.46.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89c2c520f9492844ecd6316d20c6c7a157b5c0cb73a1411b3db28ee304f30122"}, - {file = "fonttools-4.46.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5b7905fd68eacb7cc56a13139da5c312c45baae6950dd00b02563c54508a041"}, - {file = "fonttools-4.46.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8485cc468288e213f31afdaf1fdda3c79010f542559fbba936a54f4644df2570"}, - {file = "fonttools-4.46.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:87c3299da7da55394fb324349db0ede38114a46aafd0e7dfcabfecd28cdd94c3"}, - {file = "fonttools-4.46.0-cp38-cp38-win32.whl", hash = "sha256:f5f1423a504ccc329efb5aa79738de83d38c072be5308788dde6bd419969d7f5"}, - {file = "fonttools-4.46.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d4a4ebcc76e30898ff3296ea786491c70e183f738319ae2629e0d44f17ece42"}, - {file = "fonttools-4.46.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c9a0e422ab79e5cb2b47913be6a4b5fd20c4c7ac34a24f3691a4e099e965e0b8"}, - {file = "fonttools-4.46.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:13ac0cba2fc63fa4b232f2a7971f35f35c6eaf10bd1271fa96d4ce6253a8acfd"}, - {file = "fonttools-4.46.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:795150d5edc595e1a2cfb3d65e8f4f3d027704fc2579f8990d381bef6b188eb6"}, - {file = "fonttools-4.46.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d00fc63131dcac6b25f50a5a129758438317e54e3ce5587163f7058de4b0e933"}, - {file = "fonttools-4.46.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3033b55f401a622de2630b3982234d97219d89b058607b87927eccb0f922313c"}, - {file = "fonttools-4.46.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e26e7fb908ae4f622813e7cb32cd2db6c24e3122bb3b98f25e832a2fe0e7e228"}, - {file = "fonttools-4.46.0-cp39-cp39-win32.whl", hash = "sha256:2d0eba685938c603f2f648dfc0aadbf8c6a4fe1c7ca608c2970a6ef39e00f254"}, - {file = "fonttools-4.46.0-cp39-cp39-win_amd64.whl", hash = "sha256:5200b01f463d97cc2b7ff8a1e3584151f4413e98cb8419da5f17d1dbb84cc214"}, - {file = "fonttools-4.46.0-py3-none-any.whl", hash = "sha256:5b627ed142398ea9202bd752c04311592558964d1a765fb2f78dc441a05633f4"}, - {file = "fonttools-4.46.0.tar.gz", hash = "sha256:2ae45716c27a41807d58a9f3f59983bdc8c0a46cb259e4450ab7e196253a9853"}, + {file = "fonttools-4.47.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2d2404107626f97a221dc1a65b05396d2bb2ce38e435f64f26ed2369f68675d9"}, + {file = "fonttools-4.47.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c01f409be619a9a0f5590389e37ccb58b47264939f0e8d58bfa1f3ba07d22671"}, + {file = "fonttools-4.47.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d986b66ff722ef675b7ee22fbe5947a41f60a61a4da15579d5e276d897fbc7fa"}, + {file = "fonttools-4.47.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8acf6dd0434b211b3bd30d572d9e019831aae17a54016629fa8224783b22df8"}, + {file = "fonttools-4.47.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:495369c660e0c27233e3c572269cbe520f7f4978be675f990f4005937337d391"}, + {file = "fonttools-4.47.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c59227d7ba5b232281c26ae04fac2c73a79ad0e236bca5c44aae904a18f14faf"}, + {file = "fonttools-4.47.0-cp310-cp310-win32.whl", hash = "sha256:59a6c8b71a245800e923cb684a2dc0eac19c56493e2f896218fcf2571ed28984"}, + {file = "fonttools-4.47.0-cp310-cp310-win_amd64.whl", hash = "sha256:52c82df66201f3a90db438d9d7b337c7c98139de598d0728fb99dab9fd0495ca"}, + {file = "fonttools-4.47.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:854421e328d47d70aa5abceacbe8eef231961b162c71cbe7ff3f47e235e2e5c5"}, + {file = "fonttools-4.47.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:511482df31cfea9f697930f61520f6541185fa5eeba2fa760fe72e8eee5af88b"}, + {file = "fonttools-4.47.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0e2c88c8c985b7b9a7efcd06511fb0a1fe3ddd9a6cd2895ef1dbf9059719d7"}, + {file = 
"fonttools-4.47.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7a0a8848726956e9d9fb18c977a279013daadf0cbb6725d2015a6dd57527992"}, + {file = "fonttools-4.47.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e869da810ae35afb3019baa0d0306cdbab4760a54909c89ad8904fa629991812"}, + {file = "fonttools-4.47.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dd23848f877c3754f53a4903fb7a593ed100924f9b4bff7d5a4e2e8a7001ae11"}, + {file = "fonttools-4.47.0-cp311-cp311-win32.whl", hash = "sha256:bf1810635c00f7c45d93085611c995fc130009cec5abdc35b327156aa191f982"}, + {file = "fonttools-4.47.0-cp311-cp311-win_amd64.whl", hash = "sha256:61df4dee5d38ab65b26da8efd62d859a1eef7a34dcbc331299a28e24d04c59a7"}, + {file = "fonttools-4.47.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e3f4d61f3a8195eac784f1d0c16c0a3105382c1b9a74d99ac4ba421da39a8826"}, + {file = "fonttools-4.47.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:174995f7b057e799355b393e97f4f93ef1f2197cbfa945e988d49b2a09ecbce8"}, + {file = "fonttools-4.47.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea592e6a09b71cb7a7661dd93ac0b877a6228e2d677ebacbad0a4d118494c86d"}, + {file = "fonttools-4.47.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40bdbe90b33897d9cc4a39f8e415b0fcdeae4c40a99374b8a4982f127ff5c767"}, + {file = "fonttools-4.47.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:843509ae9b93db5aaf1a6302085e30bddc1111d31e11d724584818f5b698f500"}, + {file = "fonttools-4.47.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9acfa1cdc479e0dde528b61423855913d949a7f7fe09e276228298fef4589540"}, + {file = "fonttools-4.47.0-cp312-cp312-win32.whl", hash = "sha256:66c92ec7f95fd9732550ebedefcd190a8d81beaa97e89d523a0d17198a8bda4d"}, + {file = "fonttools-4.47.0-cp312-cp312-win_amd64.whl", hash = "sha256:e8fa20748de55d0021f83754b371432dca0439e02847962fc4c42a0e444c2d78"}, + {file = "fonttools-4.47.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c75e19971209fbbce891ebfd1b10c37320a5a28e8d438861c21d35305aedb81c"}, + {file = "fonttools-4.47.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e79f1a3970d25f692bbb8c8c2637e621a66c0d60c109ab48d4a160f50856deff"}, + {file = "fonttools-4.47.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:562681188c62c024fe2c611b32e08b8de2afa00c0c4e72bed47c47c318e16d5c"}, + {file = "fonttools-4.47.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a77a60315c33393b2bd29d538d1ef026060a63d3a49a9233b779261bad9c3f71"}, + {file = "fonttools-4.47.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4fabb8cc9422efae1a925160083fdcbab8fdc96a8483441eb7457235df625bd"}, + {file = "fonttools-4.47.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2a78dba8c2a1e9d53a0fb5382979f024200dc86adc46a56cbb668a2249862fda"}, + {file = "fonttools-4.47.0-cp38-cp38-win32.whl", hash = "sha256:e6b968543fde4119231c12c2a953dcf83349590ca631ba8216a8edf9cd4d36a9"}, + {file = "fonttools-4.47.0-cp38-cp38-win_amd64.whl", hash = "sha256:4a9a51745c0439516d947480d4d884fa18bd1458e05b829e482b9269afa655bc"}, + {file = "fonttools-4.47.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:62d8ddb058b8e87018e5dc26f3258e2c30daad4c87262dfeb0e2617dd84750e6"}, + {file = "fonttools-4.47.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5dde0eab40faaa5476133123f6a622a1cc3ac9b7af45d65690870620323308b4"}, + {file = 
"fonttools-4.47.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4da089f6dfdb822293bde576916492cd708c37c2501c3651adde39804630538"}, + {file = "fonttools-4.47.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:253bb46bab970e8aae254cebf2ae3db98a4ef6bd034707aa68a239027d2b198d"}, + {file = "fonttools-4.47.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1193fb090061efa2f9e2d8d743ae9850c77b66746a3b32792324cdce65784154"}, + {file = "fonttools-4.47.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:084511482dd265bce6dca24c509894062f0117e4e6869384d853f46c0e6d43be"}, + {file = "fonttools-4.47.0-cp39-cp39-win32.whl", hash = "sha256:97620c4af36e4c849e52661492e31dc36916df12571cb900d16960ab8e92a980"}, + {file = "fonttools-4.47.0-cp39-cp39-win_amd64.whl", hash = "sha256:e77bdf52185bdaf63d39f3e1ac3212e6cfa3ab07d509b94557a8902ce9c13c82"}, + {file = "fonttools-4.47.0-py3-none-any.whl", hash = "sha256:d6477ba902dd2d7adda7f0fd3bfaeb92885d45993c9e1928c9f28fc3961415f7"}, + {file = "fonttools-4.47.0.tar.gz", hash = "sha256:ec13a10715eef0e031858c1c23bfaee6cba02b97558e4a7bfa089dba4a8c2ebf"}, ] [package.extras] -all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0,<5)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0,<5)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] graphite = ["lz4 (>=1.7.4.2)"] -interpolatable = ["munkres", "scipy"] +interpolatable = ["munkres", "pycairo", "scipy"] lxml = ["lxml (>=4.0,<5)"] pathops = ["skia-pathops (>=0.5.0)"] plot = ["matplotlib"] @@ -586,13 +604,13 @@ tqdm = ">=4.66.1,<5.0.0" [[package]] name = "google-api-core" -version = "2.14.0" +version = "2.15.0" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.14.0.tar.gz", hash = "sha256:5368a4502b793d9bbf812a5912e13e4e69f9bd87f6efb508460c43f5bbd1ce41"}, - {file = "google_api_core-2.14.0-py3-none-any.whl", hash = "sha256:de2fb50ed34d47ddbb2bd2dcf680ee8fead46279f4ed6b16de362aca23a18952"}, + {file = "google-api-core-2.15.0.tar.gz", hash = "sha256:abc978a72658f14a2df1e5e12532effe40f94f868f6e23d95133bd6abcca35ca"}, + {file = "google_api_core-2.15.0-py3-none-any.whl", hash = "sha256:2aa56d2be495551e66bbff7f729b790546f87d5c90e74781aa77233bcb395a8a"}, ] [package.dependencies] @@ -608,13 +626,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.24.0" +version = "2.25.2" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.24.0.tar.gz", hash = "sha256:2ec7b2a506989d7dbfdbe81cb8d0ead8876caaed14f86d29d34483cbe99c57af"}, - {file = "google_auth-2.24.0-py2.py3-none-any.whl", hash = "sha256:9b82d5c8d3479a5391ea0a46d81cca698d328459da31d4a459d4e901a5d927e0"}, + {file = "google-auth-2.25.2.tar.gz", hash = "sha256:42f707937feb4f5e5a39e6c4f343a17300a459aaf03141457ba505812841cc40"}, + {file = "google_auth-2.25.2-py2.py3-none-any.whl", hash = "sha256:473a8dfd0135f75bb79d878436e568f2695dce456764bf3a02b6f8c540b1d256"}, ] [package.dependencies] @@ -631,13 +649,13 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] name = 
"google-cloud-core" -version = "2.3.3" +version = "2.4.1" description = "Google Cloud API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-core-2.3.3.tar.gz", hash = "sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb"}, - {file = "google_cloud_core-2.3.3-py2.py3-none-any.whl", hash = "sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863"}, + {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"}, + {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"}, ] [package.dependencies] @@ -645,17 +663,17 @@ google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" google-auth = ">=1.25.0,<3.0dev" [package.extras] -grpc = ["grpcio (>=1.38.0,<2.0dev)"] +grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] [[package]] name = "google-cloud-storage" -version = "2.13.0" +version = "2.14.0" description = "Google Cloud Storage API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-storage-2.13.0.tar.gz", hash = "sha256:f62dc4c7b6cd4360d072e3deb28035fbdad491ac3d9b0b1815a12daea10f37c7"}, - {file = "google_cloud_storage-2.13.0-py2.py3-none-any.whl", hash = "sha256:ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d"}, + {file = "google-cloud-storage-2.14.0.tar.gz", hash = "sha256:2d23fcf59b55e7b45336729c148bb1c464468c69d5efbaee30f7201dd90eb97e"}, + {file = "google_cloud_storage-2.14.0-py2.py3-none-any.whl", hash = "sha256:8641243bbf2a2042c16a6399551fbb13f062cbc9a2de38d6c0bb5426962e9dbd"}, ] [package.dependencies] @@ -751,13 +769,13 @@ testing = ["pytest"] [[package]] name = "google-resumable-media" -version = "2.6.0" +version = "2.7.0" description = "Utilities for Google Media Downloads and Resumable Uploads" optional = false python-versions = ">= 3.7" files = [ - {file = "google-resumable-media-2.6.0.tar.gz", hash = "sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7"}, - {file = "google_resumable_media-2.6.0-py2.py3-none-any.whl", hash = "sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b"}, + {file = "google-resumable-media-2.7.0.tar.gz", hash = "sha256:5f18f5fa9836f4b083162064a1c2c98c17239bfda9ca50ad970ccf905f3e625b"}, + {file = "google_resumable_media-2.7.0-py2.py3-none-any.whl", hash = "sha256:79543cfe433b63fd81c0844b7803aba1bb8950b47bedf7d980c38fa123937e08"}, ] [package.dependencies] @@ -769,13 +787,13 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] name = "googleapis-common-protos" -version = "1.61.0" +version = "1.62.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.61.0.tar.gz", hash = "sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b"}, - {file = "googleapis_common_protos-1.61.0-py2.py3-none-any.whl", hash = "sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0"}, + {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, + {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, ] [package.dependencies] @@ -786,13 +804,13 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] [[package]] name = "identify" -version = "2.5.32" +version = 
"2.5.33" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.32-py2.py3-none-any.whl", hash = "sha256:0b7656ef6cba81664b783352c73f8c24b39cf82f926f78f4550eda928e5e0545"}, - {file = "identify-2.5.32.tar.gz", hash = "sha256:5d9979348ec1a21c768ae07e0a652924538e8bce67313a73cb0f681cf08ba407"}, + {file = "identify-2.5.33-py2.py3-none-any.whl", hash = "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"}, + {file = "identify-2.5.33.tar.gz", hash = "sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d"}, ] [package.extras] @@ -811,13 +829,13 @@ files = [ [[package]] name = "imageio" -version = "2.33.0" +version = "2.33.1" description = "Library for reading and writing a wide range of image, video, scientific, and volumetric data formats." optional = false python-versions = ">=3.8" files = [ - {file = "imageio-2.33.0-py3-none-any.whl", hash = "sha256:d580d6576d0ae39c459a444a23f6f61fe72123a3df2264f5fce8c87784a4be2e"}, - {file = "imageio-2.33.0.tar.gz", hash = "sha256:39999d05eb500089e60be467dd7d618f56e142229b44c3961c2b420eeb538d7e"}, + {file = "imageio-2.33.1-py3-none-any.whl", hash = "sha256:c5094c48ccf6b2e6da8b4061cd95e1209380afafcbeae4a4e280938cce227e1d"}, + {file = "imageio-2.33.1.tar.gz", hash = "sha256:78722d40b137bd98f5ec7312119f8aea9ad2049f76f434748eb306b6937cc1ce"}, ] [package.dependencies] @@ -994,6 +1012,38 @@ files = [ {file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"}, ] +[[package]] +name = "label-studio-sdk" +version = "0.0.32" +description = "Label Studio annotation tool" +optional = false +python-versions = ">=3.6" +files = [ + {file = "label-studio-sdk-0.0.32.tar.gz", hash = "sha256:83ba94157f3849748f1c7b9d56a8f708db3c3f8dc124af4c9e7abf1eddc295c2"}, + {file = "label_studio_sdk-0.0.32-py3-none-any.whl", hash = "sha256:d624b7d6aab36918f4b46ef294ac8bd93d76be5c2b9556981fc9244f02effbbd"}, +] + +[package.dependencies] +label-studio-tools = ">=0.0.1" +lxml = ">=4.2.5" +pydantic = ">1.7,<3" +requests = ">=2.22.0,<3" + +[[package]] +name = "label-studio-tools" +version = "0.0.3" +description = "Label studio common tools" +optional = false +python-versions = ">=3.6" +files = [ + {file = "label-studio-tools-0.0.3.tar.gz", hash = "sha256:2e31698f76293e0b5d83efa2b9bde60e6e6f0e96e58da482b1fc062a1bbfe4b6"}, + {file = "label_studio_tools-0.0.3-py3-none-any.whl", hash = "sha256:72930ec95b2f203014b3741818cb46800dafdebf0247f1bd66bb14cc48a12e89"}, +] + +[package.dependencies] +appdirs = ">=1.4.3" +lxml = ">=4.2.5" + [[package]] name = "lazy-loader" version = "0.3" @@ -1009,6 +1059,113 @@ files = [ lint = ["pre-commit (>=3.3)"] test = ["pytest (>=7.4)", "pytest-cov (>=4.1)"] +[[package]] +name = "lxml" +version = "4.9.4" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" +files = [ + {file = "lxml-4.9.4-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e214025e23db238805a600f1f37bf9f9a15413c7bf5f9d6ae194f84980c78722"}, + {file = "lxml-4.9.4-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ec53a09aee61d45e7dbe7e91252ff0491b6b5fee3d85b2d45b173d8ab453efc1"}, + {file = "lxml-4.9.4-cp27-cp27m-win32.whl", hash = "sha256:7d1d6c9e74c70ddf524e3c09d9dc0522aba9370708c2cb58680ea40174800013"}, + {file = "lxml-4.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:cb53669442895763e61df5c995f0e8361b61662f26c1b04ee82899c2789c8f69"}, + {file = "lxml-4.9.4-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:647bfe88b1997d7ae8d45dabc7c868d8cb0c8412a6e730a7651050b8c7289cf2"}, + {file = "lxml-4.9.4-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4d973729ce04784906a19108054e1fd476bc85279a403ea1a72fdb051c76fa48"}, + {file = "lxml-4.9.4-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:056a17eaaf3da87a05523472ae84246f87ac2f29a53306466c22e60282e54ff8"}, + {file = "lxml-4.9.4-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:aaa5c173a26960fe67daa69aa93d6d6a1cd714a6eb13802d4e4bd1d24a530644"}, + {file = "lxml-4.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:647459b23594f370c1c01768edaa0ba0959afc39caeeb793b43158bb9bb6a663"}, + {file = "lxml-4.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:bdd9abccd0927673cffe601d2c6cdad1c9321bf3437a2f507d6b037ef91ea307"}, + {file = "lxml-4.9.4-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:00e91573183ad273e242db5585b52670eddf92bacad095ce25c1e682da14ed91"}, + {file = "lxml-4.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a602ed9bd2c7d85bd58592c28e101bd9ff9c718fbde06545a70945ffd5d11868"}, + {file = "lxml-4.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:de362ac8bc962408ad8fae28f3967ce1a262b5d63ab8cefb42662566737f1dc7"}, + {file = "lxml-4.9.4-cp310-cp310-win32.whl", hash = "sha256:33714fcf5af4ff7e70a49731a7cc8fd9ce910b9ac194f66eaa18c3cc0a4c02be"}, + {file = "lxml-4.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:d3caa09e613ece43ac292fbed513a4bce170681a447d25ffcbc1b647d45a39c5"}, + {file = "lxml-4.9.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:359a8b09d712df27849e0bcb62c6a3404e780b274b0b7e4c39a88826d1926c28"}, + {file = "lxml-4.9.4-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:43498ea734ccdfb92e1886dfedaebeb81178a241d39a79d5351ba2b671bff2b2"}, + {file = "lxml-4.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4855161013dfb2b762e02b3f4d4a21cc7c6aec13c69e3bffbf5022b3e708dd97"}, + {file = "lxml-4.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c71b5b860c5215fdbaa56f715bc218e45a98477f816b46cfde4a84d25b13274e"}, + {file = "lxml-4.9.4-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9a2b5915c333e4364367140443b59f09feae42184459b913f0f41b9fed55794a"}, + {file = "lxml-4.9.4-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d82411dbf4d3127b6cde7da0f9373e37ad3a43e89ef374965465928f01c2b979"}, + {file = "lxml-4.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:273473d34462ae6e97c0f4e517bd1bf9588aa67a1d47d93f760a1282640e24ac"}, + {file = 
"lxml-4.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:389d2b2e543b27962990ab529ac6720c3dded588cc6d0f6557eec153305a3622"}, + {file = "lxml-4.9.4-cp311-cp311-win32.whl", hash = "sha256:8aecb5a7f6f7f8fe9cac0bcadd39efaca8bbf8d1bf242e9f175cbe4c925116c3"}, + {file = "lxml-4.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:c7721a3ef41591341388bb2265395ce522aba52f969d33dacd822da8f018aff8"}, + {file = "lxml-4.9.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:dbcb2dc07308453db428a95a4d03259bd8caea97d7f0776842299f2d00c72fc8"}, + {file = "lxml-4.9.4-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:01bf1df1db327e748dcb152d17389cf6d0a8c5d533ef9bab781e9d5037619229"}, + {file = "lxml-4.9.4-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:e8f9f93a23634cfafbad6e46ad7d09e0f4a25a2400e4a64b1b7b7c0fbaa06d9d"}, + {file = "lxml-4.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3f3f00a9061605725df1816f5713d10cd94636347ed651abdbc75828df302b20"}, + {file = "lxml-4.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:953dd5481bd6252bd480d6ec431f61d7d87fdcbbb71b0d2bdcfc6ae00bb6fb10"}, + {file = "lxml-4.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:f1faee2a831fe249e1bae9cbc68d3cd8a30f7e37851deee4d7962b17c410dd56"}, + {file = "lxml-4.9.4-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:23d891e5bdc12e2e506e7d225d6aa929e0a0368c9916c1fddefab88166e98b20"}, + {file = "lxml-4.9.4-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e96a1788f24d03e8d61679f9881a883ecdf9c445a38f9ae3f3f193ab6c591c66"}, + {file = "lxml-4.9.4-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:5557461f83bb7cc718bc9ee1f7156d50e31747e5b38d79cf40f79ab1447afd2d"}, + {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:fdb325b7fba1e2c40b9b1db407f85642e32404131c08480dd652110fc908561b"}, + {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d74d4a3c4b8f7a1f676cedf8e84bcc57705a6d7925e6daef7a1e54ae543a197"}, + {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ac7674d1638df129d9cb4503d20ffc3922bd463c865ef3cb412f2c926108e9a4"}, + {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:ddd92e18b783aeb86ad2132d84a4b795fc5ec612e3545c1b687e7747e66e2b53"}, + {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2bd9ac6e44f2db368ef8986f3989a4cad3de4cd55dbdda536e253000c801bcc7"}, + {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bc354b1393dce46026ab13075f77b30e40b61b1a53e852e99d3cc5dd1af4bc85"}, + {file = "lxml-4.9.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:f836f39678cb47c9541f04d8ed4545719dc31ad850bf1832d6b4171e30d65d23"}, + {file = "lxml-4.9.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:9c131447768ed7bc05a02553d939e7f0e807e533441901dd504e217b76307745"}, + {file = "lxml-4.9.4-cp36-cp36m-win32.whl", hash = "sha256:bafa65e3acae612a7799ada439bd202403414ebe23f52e5b17f6ffc2eb98c2be"}, + {file = "lxml-4.9.4-cp36-cp36m-win_amd64.whl", hash = "sha256:6197c3f3c0b960ad033b9b7d611db11285bb461fc6b802c1dd50d04ad715c225"}, + {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:7b378847a09d6bd46047f5f3599cdc64fcb4cc5a5a2dd0a2af610361fbe77b16"}, + {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = 
"sha256:1343df4e2e6e51182aad12162b23b0a4b3fd77f17527a78c53f0f23573663545"}, + {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6dbdacf5752fbd78ccdb434698230c4f0f95df7dd956d5f205b5ed6911a1367c"}, + {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:506becdf2ecaebaf7f7995f776394fcc8bd8a78022772de66677c84fb02dd33d"}, + {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca8e44b5ba3edb682ea4e6185b49661fc22b230cf811b9c13963c9f982d1d964"}, + {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9d9d5726474cbbef279fd709008f91a49c4f758bec9c062dfbba88eab00e3ff9"}, + {file = "lxml-4.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:bbdd69e20fe2943b51e2841fc1e6a3c1de460d630f65bde12452d8c97209464d"}, + {file = "lxml-4.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8671622256a0859f5089cbe0ce4693c2af407bc053dcc99aadff7f5310b4aa02"}, + {file = "lxml-4.9.4-cp37-cp37m-win32.whl", hash = "sha256:dd4fda67f5faaef4f9ee5383435048ee3e11ad996901225ad7615bc92245bc8e"}, + {file = "lxml-4.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6bee9c2e501d835f91460b2c904bc359f8433e96799f5c2ff20feebd9bb1e590"}, + {file = "lxml-4.9.4-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:1f10f250430a4caf84115b1e0f23f3615566ca2369d1962f82bef40dd99cd81a"}, + {file = "lxml-4.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:3b505f2bbff50d261176e67be24e8909e54b5d9d08b12d4946344066d66b3e43"}, + {file = "lxml-4.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:1449f9451cd53e0fd0a7ec2ff5ede4686add13ac7a7bfa6988ff6d75cff3ebe2"}, + {file = "lxml-4.9.4-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:4ece9cca4cd1c8ba889bfa67eae7f21d0d1a2e715b4d5045395113361e8c533d"}, + {file = "lxml-4.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59bb5979f9941c61e907ee571732219fa4774d5a18f3fa5ff2df963f5dfaa6bc"}, + {file = "lxml-4.9.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b1980dbcaad634fe78e710c8587383e6e3f61dbe146bcbfd13a9c8ab2d7b1192"}, + {file = "lxml-4.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9ae6c3363261021144121427b1552b29e7b59de9d6a75bf51e03bc072efb3c37"}, + {file = "lxml-4.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bcee502c649fa6351b44bb014b98c09cb00982a475a1912a9881ca28ab4f9cd9"}, + {file = "lxml-4.9.4-cp38-cp38-win32.whl", hash = "sha256:a8edae5253efa75c2fc79a90068fe540b197d1c7ab5803b800fccfe240eed33c"}, + {file = "lxml-4.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:701847a7aaefef121c5c0d855b2affa5f9bd45196ef00266724a80e439220e46"}, + {file = "lxml-4.9.4-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:f610d980e3fccf4394ab3806de6065682982f3d27c12d4ce3ee46a8183d64a6a"}, + {file = "lxml-4.9.4-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:aa9b5abd07f71b081a33115d9758ef6077924082055005808f68feccb27616bd"}, + {file = "lxml-4.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:365005e8b0718ea6d64b374423e870648ab47c3a905356ab6e5a5ff03962b9a9"}, + {file = "lxml-4.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:16b9ec51cc2feab009e800f2c6327338d6ee4e752c76e95a35c4465e80390ccd"}, + {file = 
"lxml-4.9.4-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:a905affe76f1802edcac554e3ccf68188bea16546071d7583fb1b693f9cf756b"}, + {file = "lxml-4.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fd814847901df6e8de13ce69b84c31fc9b3fb591224d6762d0b256d510cbf382"}, + {file = "lxml-4.9.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:91bbf398ac8bb7d65a5a52127407c05f75a18d7015a270fdd94bbcb04e65d573"}, + {file = "lxml-4.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f99768232f036b4776ce419d3244a04fe83784bce871b16d2c2e984c7fcea847"}, + {file = "lxml-4.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bb5bd6212eb0edfd1e8f254585290ea1dadc3687dd8fd5e2fd9a87c31915cdab"}, + {file = "lxml-4.9.4-cp39-cp39-win32.whl", hash = "sha256:88f7c383071981c74ec1998ba9b437659e4fd02a3c4a4d3efc16774eb108d0ec"}, + {file = "lxml-4.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:936e8880cc00f839aa4173f94466a8406a96ddce814651075f95837316369899"}, + {file = "lxml-4.9.4-pp310-pypy310_pp73-macosx_11_0_x86_64.whl", hash = "sha256:f6c35b2f87c004270fa2e703b872fcc984d714d430b305145c39d53074e1ffe0"}, + {file = "lxml-4.9.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:606d445feeb0856c2b424405236a01c71af7c97e5fe42fbc778634faef2b47e4"}, + {file = "lxml-4.9.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a1bdcbebd4e13446a14de4dd1825f1e778e099f17f79718b4aeaf2403624b0f7"}, + {file = "lxml-4.9.4-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0a08c89b23117049ba171bf51d2f9c5f3abf507d65d016d6e0fa2f37e18c0fc5"}, + {file = "lxml-4.9.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:232fd30903d3123be4c435fb5159938c6225ee8607b635a4d3fca847003134ba"}, + {file = "lxml-4.9.4-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:231142459d32779b209aa4b4d460b175cadd604fed856f25c1571a9d78114771"}, + {file = "lxml-4.9.4-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:520486f27f1d4ce9654154b4494cf9307b495527f3a2908ad4cb48e4f7ed7ef7"}, + {file = "lxml-4.9.4-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:562778586949be7e0d7435fcb24aca4810913771f845d99145a6cee64d5b67ca"}, + {file = "lxml-4.9.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a9e7c6d89c77bb2770c9491d988f26a4b161d05c8ca58f63fb1f1b6b9a74be45"}, + {file = "lxml-4.9.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:786d6b57026e7e04d184313c1359ac3d68002c33e4b1042ca58c362f1d09ff58"}, + {file = "lxml-4.9.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:95ae6c5a196e2f239150aa4a479967351df7f44800c93e5a975ec726fef005e2"}, + {file = "lxml-4.9.4-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:9b556596c49fa1232b0fff4b0e69b9d4083a502e60e404b44341e2f8fb7187f5"}, + {file = "lxml-4.9.4-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:cc02c06e9e320869d7d1bd323df6dd4281e78ac2e7f8526835d3d48c69060683"}, + {file = "lxml-4.9.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:857d6565f9aa3464764c2cb6a2e3c2e75e1970e877c188f4aeae45954a314e0c"}, + {file = "lxml-4.9.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c42ae7e010d7d6bc51875d768110c10e8a59494855c3d4c348b068f5fb81fdcd"}, + {file = "lxml-4.9.4-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:f10250bb190fb0742e3e1958dd5c100524c2cc5096c67c8da51233f7448dc137"}, + {file = "lxml-4.9.4.tar.gz", hash = "sha256:b1541e50b78e15fa06a2670157a1962ef06591d4c998b998047fff5e3236880e"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (==0.29.37)"] + [[package]] name = "matplotlib" version = "3.8.2" @@ -1170,36 +1327,36 @@ files = [ [[package]] name = "pandas" -version = "2.1.3" +version = "2.1.4" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:acf08a73b5022b479c1be155d4988b72f3020f308f7a87c527702c5f8966d34f"}, - {file = "pandas-2.1.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3cc4469ff0cf9aa3a005870cb49ab8969942b7156e0a46cc3f5abd6b11051dfb"}, - {file = "pandas-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35172bff95f598cc5866c047f43c7f4df2c893acd8e10e6653a4b792ed7f19bb"}, - {file = "pandas-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59dfe0e65a2f3988e940224e2a70932edc964df79f3356e5f2997c7d63e758b4"}, - {file = "pandas-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0296a66200dee556850d99b24c54c7dfa53a3264b1ca6f440e42bad424caea03"}, - {file = "pandas-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:465571472267a2d6e00657900afadbe6097c8e1dc43746917db4dfc862e8863e"}, - {file = "pandas-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04d4c58e1f112a74689da707be31cf689db086949c71828ef5da86727cfe3f82"}, - {file = "pandas-2.1.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7fa2ad4ff196768ae63a33f8062e6838efed3a319cf938fdf8b95e956c813042"}, - {file = "pandas-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4441ac94a2a2613e3982e502ccec3bdedefe871e8cea54b8775992485c5660ef"}, - {file = "pandas-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5ded6ff28abbf0ea7689f251754d3789e1edb0c4d0d91028f0b980598418a58"}, - {file = "pandas-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fca5680368a5139d4920ae3dc993eb5106d49f814ff24018b64d8850a52c6ed2"}, - {file = "pandas-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:de21e12bf1511190fc1e9ebc067f14ca09fccfb189a813b38d63211d54832f5f"}, - {file = "pandas-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a5d53c725832e5f1645e7674989f4c106e4b7249c1d57549023ed5462d73b140"}, - {file = "pandas-2.1.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7cf4cf26042476e39394f1f86868d25b265ff787c9b2f0d367280f11afbdee6d"}, - {file = "pandas-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72c84ec1b1d8e5efcbff5312abe92bfb9d5b558f11e0cf077f5496c4f4a3c99e"}, - {file = "pandas-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f539e113739a3e0cc15176bf1231a553db0239bfa47a2c870283fd93ba4f683"}, - {file = "pandas-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc77309da3b55732059e484a1efc0897f6149183c522390772d3561f9bf96c00"}, - {file = "pandas-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:08637041279b8981a062899da0ef47828df52a1838204d2b3761fbd3e9fcb549"}, - {file = "pandas-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b99c4e51ef2ed98f69099c72c75ec904dd610eb41a32847c4fcbc1a975f2d2b8"}, - {file = "pandas-2.1.3-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:f7ea8ae8004de0381a2376662c0505bb0a4f679f4c61fbfd122aa3d1b0e5f09d"}, - {file = "pandas-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcd76d67ca2d48f56e2db45833cf9d58f548f97f61eecd3fdc74268417632b8a"}, - {file = "pandas-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1329dbe93a880a3d7893149979caa82d6ba64a25e471682637f846d9dbc10dd2"}, - {file = "pandas-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:321ecdb117bf0f16c339cc6d5c9a06063854f12d4d9bc422a84bb2ed3207380a"}, - {file = "pandas-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:11a771450f36cebf2a4c9dbd3a19dfa8c46c4b905a3ea09dc8e556626060fe71"}, - {file = "pandas-2.1.3.tar.gz", hash = "sha256:22929f84bca106921917eb73c1521317ddd0a4c71b395bcf767a106e3494209f"}, + {file = "pandas-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bdec823dc6ec53f7a6339a0e34c68b144a7a1fd28d80c260534c39c62c5bf8c9"}, + {file = "pandas-2.1.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:294d96cfaf28d688f30c918a765ea2ae2e0e71d3536754f4b6de0ea4a496d034"}, + {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b728fb8deba8905b319f96447a27033969f3ea1fea09d07d296c9030ab2ed1d"}, + {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00028e6737c594feac3c2df15636d73ace46b8314d236100b57ed7e4b9ebe8d9"}, + {file = "pandas-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:426dc0f1b187523c4db06f96fb5c8d1a845e259c99bda74f7de97bd8a3bb3139"}, + {file = "pandas-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:f237e6ca6421265643608813ce9793610ad09b40154a3344a088159590469e46"}, + {file = "pandas-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b7d852d16c270e4331f6f59b3e9aa23f935f5c4b0ed2d0bc77637a8890a5d092"}, + {file = "pandas-2.1.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7d5f2f54f78164b3d7a40f33bf79a74cdee72c31affec86bfcabe7e0789821"}, + {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0aa6e92e639da0d6e2017d9ccff563222f4eb31e4b2c3cf32a2a392fc3103c0d"}, + {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d797591b6846b9db79e65dc2d0d48e61f7db8d10b2a9480b4e3faaddc421a171"}, + {file = "pandas-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2d3e7b00f703aea3945995ee63375c61b2e6aa5aa7871c5d622870e5e137623"}, + {file = "pandas-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:dc9bf7ade01143cddc0074aa6995edd05323974e6e40d9dbde081021ded8510e"}, + {file = "pandas-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:482d5076e1791777e1571f2e2d789e940dedd927325cc3cb6d0800c6304082f6"}, + {file = "pandas-2.1.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8a706cfe7955c4ca59af8c7a0517370eafbd98593155b48f10f9811da440248b"}, + {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0513a132a15977b4a5b89aabd304647919bc2169eac4c8536afb29c07c23540"}, + {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9f17f2b6fc076b2a0078862547595d66244db0f41bf79fc5f64a5c4d635bead"}, + {file = "pandas-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:45d63d2a9b1b37fa6c84a68ba2422dc9ed018bdaa668c7f47566a01188ceeec1"}, + {file = "pandas-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:f69b0c9bb174a2342818d3e2778584e18c740d56857fc5cdb944ec8bbe4082cf"}, + {file = 
"pandas-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3f06bda01a143020bad20f7a85dd5f4a1600112145f126bc9e3e42077c24ef34"}, + {file = "pandas-2.1.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab5796839eb1fd62a39eec2916d3e979ec3130509930fea17fe6f81e18108f6a"}, + {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edbaf9e8d3a63a9276d707b4d25930a262341bca9874fcb22eff5e3da5394732"}, + {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ebfd771110b50055712b3b711b51bee5d50135429364d0498e1213a7adc2be8"}, + {file = "pandas-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8ea107e0be2aba1da619cc6ba3f999b2bfc9669a83554b1904ce3dd9507f0860"}, + {file = "pandas-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:d65148b14788b3758daf57bf42725caa536575da2b64df9964c563b015230984"}, + {file = "pandas-2.1.4.tar.gz", hash = "sha256:fcb68203c833cc735321512e13861358079a96c174a61f5116a1de89c58c0ef7"}, ] [package.dependencies] @@ -1237,13 +1394,13 @@ xml = ["lxml (>=4.8.0)"] [[package]] name = "pathspec" -version = "0.11.2" +version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, - {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] @@ -1315,13 +1472,13 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa [[package]] name = "platformdirs" -version = "4.0.0" +version = "4.1.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b"}, - {file = "platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731"}, + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, ] [package.extras] @@ -1345,13 +1502,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "3.5.0" +version = "3.6.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pre_commit-3.5.0-py2.py3-none-any.whl", hash = "sha256:841dc9aef25daba9a0238cd27984041fa0467b4199fc4852e27950664919f660"}, - {file = "pre_commit-3.5.0.tar.gz", hash = "sha256:5804465c675b659b0862f07907f96295d490822a450c4c40e747d0b1c6ebcb32"}, + {file = "pre_commit-3.6.0-py2.py3-none-any.whl", hash = "sha256:c255039ef399049a5544b6ce13d135caba8f2c28c3b4033277a788f434308376"}, + {file = "pre_commit-3.6.0.tar.gz", hash = "sha256:d30bad9abf165f7785c15a21a1f46da7d0677cb00ee7ff4c579fd38922efe15d"}, ] [package.dependencies] @@ -1383,27 +1540,27 @@ files = [ [[package]] name = "psutil" -version = "5.9.6" +version = "5.9.7" description = "Cross-platform lib for process and system monitoring in Python." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "psutil-5.9.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:fb8a697f11b0f5994550555fcfe3e69799e5b060c8ecf9e2f75c69302cc35c0d"}, - {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:91ecd2d9c00db9817a4b4192107cf6954addb5d9d67a969a4f436dbc9200f88c"}, - {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:10e8c17b4f898d64b121149afb136c53ea8b68c7531155147867b7b1ac9e7e28"}, - {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:18cd22c5db486f33998f37e2bb054cc62fd06646995285e02a51b1e08da97017"}, - {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:ca2780f5e038379e520281e4c032dddd086906ddff9ef0d1b9dcf00710e5071c"}, - {file = "psutil-5.9.6-cp27-none-win32.whl", hash = "sha256:70cb3beb98bc3fd5ac9ac617a327af7e7f826373ee64c80efd4eb2856e5051e9"}, - {file = "psutil-5.9.6-cp27-none-win_amd64.whl", hash = "sha256:51dc3d54607c73148f63732c727856f5febec1c7c336f8f41fcbd6315cce76ac"}, - {file = "psutil-5.9.6-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c69596f9fc2f8acd574a12d5f8b7b1ba3765a641ea5d60fb4736bf3c08a8214a"}, - {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92e0cc43c524834af53e9d3369245e6cc3b130e78e26100d1f63cdb0abeb3d3c"}, - {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:748c9dd2583ed86347ed65d0035f45fa8c851e8d90354c122ab72319b5f366f4"}, - {file = "psutil-5.9.6-cp36-cp36m-win32.whl", hash = "sha256:3ebf2158c16cc69db777e3c7decb3c0f43a7af94a60d72e87b2823aebac3d602"}, - {file = "psutil-5.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:ff18b8d1a784b810df0b0fff3bcb50ab941c3b8e2c8de5726f9c71c601c611aa"}, - {file = "psutil-5.9.6-cp37-abi3-win32.whl", hash = "sha256:a6f01f03bf1843280f4ad16f4bde26b817847b4c1a0db59bf6419807bc5ce05c"}, - {file = "psutil-5.9.6-cp37-abi3-win_amd64.whl", hash = "sha256:6e5fb8dc711a514da83098bc5234264e551ad980cec5f85dabf4d38ed6f15e9a"}, - {file = "psutil-5.9.6-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:daecbcbd29b289aac14ece28eca6a3e60aa361754cf6da3dfb20d4d32b6c7f57"}, - {file = "psutil-5.9.6.tar.gz", hash = "sha256:e4b92ddcd7dd4cdd3f900180ea1e104932c7bce234fb88976e2a3b296441225a"}, + {file = "psutil-5.9.7-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0bd41bf2d1463dfa535942b2a8f0e958acf6607ac0be52265ab31f7923bcd5e6"}, + {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5794944462509e49d4d458f4dbfb92c47539e7d8d15c796f141f474010084056"}, + {file = 
"psutil-5.9.7-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:fe361f743cb3389b8efda21980d93eb55c1f1e3898269bc9a2a1d0bb7b1f6508"}, + {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:e469990e28f1ad738f65a42dcfc17adaed9d0f325d55047593cb9033a0ab63df"}, + {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:3c4747a3e2ead1589e647e64aad601981f01b68f9398ddf94d01e3dc0d1e57c7"}, + {file = "psutil-5.9.7-cp27-none-win32.whl", hash = "sha256:1d4bc4a0148fdd7fd8f38e0498639ae128e64538faa507df25a20f8f7fb2341c"}, + {file = "psutil-5.9.7-cp27-none-win_amd64.whl", hash = "sha256:4c03362e280d06bbbfcd52f29acd79c733e0af33d707c54255d21029b8b32ba6"}, + {file = "psutil-5.9.7-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ea36cc62e69a13ec52b2f625c27527f6e4479bca2b340b7a452af55b34fcbe2e"}, + {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1132704b876e58d277168cd729d64750633d5ff0183acf5b3c986b8466cd0284"}, + {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8b7f07948f1304497ce4f4684881250cd859b16d06a1dc4d7941eeb6233bfe"}, + {file = "psutil-5.9.7-cp36-cp36m-win32.whl", hash = "sha256:b27f8fdb190c8c03914f908a4555159327d7481dac2f01008d483137ef3311a9"}, + {file = "psutil-5.9.7-cp36-cp36m-win_amd64.whl", hash = "sha256:44969859757f4d8f2a9bd5b76eba8c3099a2c8cf3992ff62144061e39ba8568e"}, + {file = "psutil-5.9.7-cp37-abi3-win32.whl", hash = "sha256:c727ca5a9b2dd5193b8644b9f0c883d54f1248310023b5ad3e92036c5e2ada68"}, + {file = "psutil-5.9.7-cp37-abi3-win_amd64.whl", hash = "sha256:f37f87e4d73b79e6c5e749440c3113b81d1ee7d26f21c19c47371ddea834f414"}, + {file = "psutil-5.9.7-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:032f4f2c909818c86cea4fe2cc407f1c0f0cde8e6c6d702b28b8ce0c0d143340"}, + {file = "psutil-5.9.7.tar.gz", hash = "sha256:3f02134e82cfb5d089fddf20bb2e03fd5cd52395321d1c8458a9e58500ff417c"}, ] [package.extras] @@ -1445,6 +1602,142 @@ files = [ {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, ] +[[package]] +name = "pydantic" +version = "2.5.2" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-2.5.2-py3-none-any.whl", hash = "sha256:80c50fb8e3dcecfddae1adbcc00ec5822918490c99ab31f6cf6140ca1c1429f0"}, + {file = "pydantic-2.5.2.tar.gz", hash = "sha256:ff177ba64c6faf73d7afa2e8cad38fd456c0dbe01c9954e71038001cd15a6edd"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.14.5" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.14.5" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic_core-2.14.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:7e88f5696153dc516ba6e79f82cc4747e87027205f0e02390c21f7cb3bd8abfd"}, + {file = "pydantic_core-2.14.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4641e8ad4efb697f38a9b64ca0523b557c7931c5f84e0fd377a9a3b05121f0de"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:774de879d212db5ce02dfbf5b0da9a0ea386aeba12b0b95674a4ce0593df3d07"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:ebb4e035e28f49b6f1a7032920bb9a0c064aedbbabe52c543343d39341a5b2a3"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b53e9ad053cd064f7e473a5f29b37fc4cc9dc6d35f341e6afc0155ea257fc911"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aa1768c151cf562a9992462239dfc356b3d1037cc5a3ac829bb7f3bda7cc1f9"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eac5c82fc632c599f4639a5886f96867ffced74458c7db61bc9a66ccb8ee3113"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae91f50ccc5810b2f1b6b858257c9ad2e08da70bf890dee02de1775a387c66"}, + {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6b9ff467ffbab9110e80e8c8de3bcfce8e8b0fd5661ac44a09ae5901668ba997"}, + {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61ea96a78378e3bd5a0be99b0e5ed00057b71f66115f5404d0dae4819f495093"}, + {file = "pydantic_core-2.14.5-cp310-none-win32.whl", hash = "sha256:bb4c2eda937a5e74c38a41b33d8c77220380a388d689bcdb9b187cf6224c9720"}, + {file = "pydantic_core-2.14.5-cp310-none-win_amd64.whl", hash = "sha256:b7851992faf25eac90bfcb7bfd19e1f5ffa00afd57daec8a0042e63c74a4551b"}, + {file = "pydantic_core-2.14.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:4e40f2bd0d57dac3feb3a3aed50f17d83436c9e6b09b16af271b6230a2915459"}, + {file = "pydantic_core-2.14.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab1cdb0f14dc161ebc268c09db04d2c9e6f70027f3b42446fa11c153521c0e88"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aae7ea3a1c5bb40c93cad361b3e869b180ac174656120c42b9fadebf685d121b"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60b7607753ba62cf0739177913b858140f11b8af72f22860c28eabb2f0a61937"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2248485b0322c75aee7565d95ad0e16f1c67403a470d02f94da7344184be770f"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:823fcc638f67035137a5cd3f1584a4542d35a951c3cc68c6ead1df7dac825c26"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96581cfefa9123accc465a5fd0cc833ac4d75d55cc30b633b402e00e7ced00a6"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a33324437018bf6ba1bb0f921788788641439e0ed654b233285b9c69704c27b4"}, + {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9bd18fee0923ca10f9a3ff67d4851c9d3e22b7bc63d1eddc12f439f436f2aada"}, + {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:853a2295c00f1d4429db4c0fb9475958543ee80cfd310814b5c0ef502de24dda"}, + {file = "pydantic_core-2.14.5-cp311-none-win32.whl", hash = "sha256:cb774298da62aea5c80a89bd58c40205ab4c2abf4834453b5de207d59d2e1651"}, + {file = "pydantic_core-2.14.5-cp311-none-win_amd64.whl", hash = "sha256:e87fc540c6cac7f29ede02e0f989d4233f88ad439c5cdee56f693cc9c1c78077"}, + {file = "pydantic_core-2.14.5-cp311-none-win_arm64.whl", hash = "sha256:57d52fa717ff445cb0a5ab5237db502e6be50809b43a596fb569630c665abddf"}, + {file = "pydantic_core-2.14.5-cp312-cp312-macosx_10_7_x86_64.whl", hash = 
"sha256:e60f112ac88db9261ad3a52032ea46388378034f3279c643499edb982536a093"}, + {file = "pydantic_core-2.14.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6e227c40c02fd873c2a73a98c1280c10315cbebe26734c196ef4514776120aeb"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0cbc7fff06a90bbd875cc201f94ef0ee3929dfbd5c55a06674b60857b8b85ed"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:103ef8d5b58596a731b690112819501ba1db7a36f4ee99f7892c40da02c3e189"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c949f04ecad823f81b1ba94e7d189d9dfb81edbb94ed3f8acfce41e682e48cef"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1452a1acdf914d194159439eb21e56b89aa903f2e1c65c60b9d874f9b950e5d"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4679d4c2b089e5ef89756bc73e1926745e995d76e11925e3e96a76d5fa51fc"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf9d3fe53b1ee360e2421be95e62ca9b3296bf3f2fb2d3b83ca49ad3f925835e"}, + {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:70f4b4851dbb500129681d04cc955be2a90b2248d69273a787dda120d5cf1f69"}, + {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:59986de5710ad9613ff61dd9b02bdd2f615f1a7052304b79cc8fa2eb4e336d2d"}, + {file = "pydantic_core-2.14.5-cp312-none-win32.whl", hash = "sha256:699156034181e2ce106c89ddb4b6504c30db8caa86e0c30de47b3e0654543260"}, + {file = "pydantic_core-2.14.5-cp312-none-win_amd64.whl", hash = "sha256:5baab5455c7a538ac7e8bf1feec4278a66436197592a9bed538160a2e7d11e36"}, + {file = "pydantic_core-2.14.5-cp312-none-win_arm64.whl", hash = "sha256:e47e9a08bcc04d20975b6434cc50bf82665fbc751bcce739d04a3120428f3e27"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:af36f36538418f3806048f3b242a1777e2540ff9efaa667c27da63d2749dbce0"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:45e95333b8418ded64745f14574aa9bfc212cb4fbeed7a687b0c6e53b5e188cd"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e47a76848f92529879ecfc417ff88a2806438f57be4a6a8bf2961e8f9ca9ec7"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d81e6987b27bc7d101c8597e1cd2bcaa2fee5e8e0f356735c7ed34368c471550"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34708cc82c330e303f4ce87758828ef6e457681b58ce0e921b6e97937dd1e2a3"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:652c1988019752138b974c28f43751528116bcceadad85f33a258869e641d753"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e4d090e73e0725b2904fdbdd8d73b8802ddd691ef9254577b708d413bf3006e"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5c7d5b5005f177764e96bd584d7bf28d6e26e96f2a541fdddb934c486e36fd59"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a71891847f0a73b1b9eb86d089baee301477abef45f7eaf303495cd1473613e4"}, + {file = 
"pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a717aef6971208f0851a2420b075338e33083111d92041157bbe0e2713b37325"}, + {file = "pydantic_core-2.14.5-cp37-none-win32.whl", hash = "sha256:de790a3b5aa2124b8b78ae5faa033937a72da8efe74b9231698b5a1dd9be3405"}, + {file = "pydantic_core-2.14.5-cp37-none-win_amd64.whl", hash = "sha256:6c327e9cd849b564b234da821236e6bcbe4f359a42ee05050dc79d8ed2a91588"}, + {file = "pydantic_core-2.14.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef98ca7d5995a82f43ec0ab39c4caf6a9b994cb0b53648ff61716370eadc43cf"}, + {file = "pydantic_core-2.14.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6eae413494a1c3f89055da7a5515f32e05ebc1a234c27674a6956755fb2236f"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcf4e6d85614f7a4956c2de5a56531f44efb973d2fe4a444d7251df5d5c4dcfd"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6637560562134b0e17de333d18e69e312e0458ee4455bdad12c37100b7cad706"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77fa384d8e118b3077cccfcaf91bf83c31fe4dc850b5e6ee3dc14dc3d61bdba1"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16e29bad40bcf97aac682a58861249ca9dcc57c3f6be22f506501833ddb8939c"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531f4b4252fac6ca476fbe0e6f60f16f5b65d3e6b583bc4d87645e4e5ddde331"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:074f3d86f081ce61414d2dc44901f4f83617329c6f3ab49d2bc6c96948b2c26b"}, + {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c2adbe22ab4babbca99c75c5d07aaf74f43c3195384ec07ccbd2f9e3bddaecec"}, + {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0f6116a558fd06d1b7c2902d1c4cf64a5bd49d67c3540e61eccca93f41418124"}, + {file = "pydantic_core-2.14.5-cp38-none-win32.whl", hash = "sha256:fe0a5a1025eb797752136ac8b4fa21aa891e3d74fd340f864ff982d649691867"}, + {file = "pydantic_core-2.14.5-cp38-none-win_amd64.whl", hash = "sha256:079206491c435b60778cf2b0ee5fd645e61ffd6e70c47806c9ed51fc75af078d"}, + {file = "pydantic_core-2.14.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:a6a16f4a527aae4f49c875da3cdc9508ac7eef26e7977952608610104244e1b7"}, + {file = "pydantic_core-2.14.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:abf058be9517dc877227ec3223f0300034bd0e9f53aebd63cf4456c8cb1e0863"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b08aae5013640a3bfa25a8eebbd95638ec3f4b2eaf6ed82cf0c7047133f03b"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2d97e906b4ff36eb464d52a3bc7d720bd6261f64bc4bcdbcd2c557c02081ed2"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3128e0bbc8c091ec4375a1828d6118bc20404883169ac95ffa8d983b293611e6"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88e74ab0cdd84ad0614e2750f903bb0d610cc8af2cc17f72c28163acfcf372a4"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c339dabd8ee15f8259ee0f202679b6324926e5bc9e9a40bf981ce77c038553db"}, + {file = 
"pydantic_core-2.14.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3387277f1bf659caf1724e1afe8ee7dbc9952a82d90f858ebb931880216ea955"}, + {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ba6b6b3846cfc10fdb4c971980a954e49d447cd215ed5a77ec8190bc93dd7bc5"}, + {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca61d858e4107ce5e1330a74724fe757fc7135190eb5ce5c9d0191729f033209"}, + {file = "pydantic_core-2.14.5-cp39-none-win32.whl", hash = "sha256:ec1e72d6412f7126eb7b2e3bfca42b15e6e389e1bc88ea0069d0cc1742f477c6"}, + {file = "pydantic_core-2.14.5-cp39-none-win_amd64.whl", hash = "sha256:c0b97ec434041827935044bbbe52b03d6018c2897349670ff8fe11ed24d1d4ab"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79e0a2cdbdc7af3f4aee3210b1172ab53d7ddb6a2d8c24119b5706e622b346d0"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:678265f7b14e138d9a541ddabbe033012a2953315739f8cfa6d754cc8063e8ca"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b15e855ae44f0c6341ceb74df61b606e11f1087e87dcb7482377374aac6abe"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09b0e985fbaf13e6b06a56d21694d12ebca6ce5414b9211edf6f17738d82b0f8"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ad873900297bb36e4b6b3f7029d88ff9829ecdc15d5cf20161775ce12306f8a"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2d0ae0d8670164e10accbeb31d5ad45adb71292032d0fdb9079912907f0085f4"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d37f8ec982ead9ba0a22a996129594938138a1503237b87318392a48882d50b7"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:35613015f0ba7e14c29ac6c2483a657ec740e5ac5758d993fdd5870b07a61d8b"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab4ea451082e684198636565224bbb179575efc1658c48281b2c866bfd4ddf04"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ce601907e99ea5b4adb807ded3570ea62186b17f88e271569144e8cca4409c7"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb2ed8b3fe4bf4506d6dab3b93b83bbc22237e230cba03866d561c3577517d18"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70f947628e074bb2526ba1b151cee10e4c3b9670af4dbb4d73bc8a89445916b5"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4bc536201426451f06f044dfbf341c09f540b4ebdb9fd8d2c6164d733de5e634"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4791cf0f8c3104ac668797d8c514afb3431bc3305f5638add0ba1a5a37e0d88"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:038c9f763e650712b899f983076ce783175397c848da04985658e7628cbe873b"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:27548e16c79702f1e03f5628589c6057c9ae17c95b4c449de3c66b589ead0520"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c97bee68898f3f4344eb02fec316db93d9700fb1e6a5b760ffa20d71d9a46ce3"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b759b77f5337b4ea024f03abc6464c9f35d9718de01cfe6bae9f2e139c397e"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:439c9afe34638ace43a49bf72d201e0ffc1a800295bed8420c2a9ca8d5e3dbb3"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ba39688799094c75ea8a16a6b544eb57b5b0f3328697084f3f2790892510d144"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ccd4d5702bb90b84df13bd491be8d900b92016c5a455b7e14630ad7449eb03f8"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:81982d78a45d1e5396819bbb4ece1fadfe5f079335dd28c4ab3427cd95389944"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:7f8210297b04e53bc3da35db08b7302a6a1f4889c79173af69b72ec9754796b8"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8c8a8812fe6f43a3a5b054af6ac2d7b8605c7bcab2804a8a7d68b53f3cd86e00"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:206ed23aecd67c71daf5c02c3cd19c0501b01ef3cbf7782db9e4e051426b3d0d"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2027d05c8aebe61d898d4cffd774840a9cb82ed356ba47a90d99ad768f39789"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40180930807ce806aa71eda5a5a5447abb6b6a3c0b4b3b1b1962651906484d68"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:615a0a4bff11c45eb3c1996ceed5bdaa2f7b432425253a7c2eed33bb86d80abc"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5e412d717366e0677ef767eac93566582518fe8be923361a5c204c1a62eaafe"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:513b07e99c0a267b1d954243845d8a833758a6726a3b5d8948306e3fe14675e3"}, + {file = "pydantic_core-2.14.5.tar.gz", hash = "sha256:6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + [[package]] name = "pyflakes" version = "3.1.0" @@ -1771,13 +2064,13 @@ stats = ["scipy (>=1.7)", "statsmodels (>=0.12)"] [[package]] name = "sentry-sdk" -version = "1.38.0" +version = "1.39.1" description = "Python client for Sentry (https://sentry.io)" optional = false python-versions = "*" files = [ - {file = "sentry-sdk-1.38.0.tar.gz", hash = "sha256:8feab81de6bbf64f53279b085bd3820e3e737403b0a0d9317f73a2c3374ae359"}, - {file = "sentry_sdk-1.38.0-py2.py3-none-any.whl", hash = "sha256:0017fa73b8ae2d4e57fd2522ee3df30453715b29d2692142793ec5d5f90b94a6"}, + {file = "sentry-sdk-1.39.1.tar.gz", hash = "sha256:320a55cdf9da9097a0bead239c35b7e61f53660ef9878861824fd6d9b2eaf3b5"}, + {file = "sentry_sdk-1.39.1-py2.py3-none-any.whl", hash = "sha256:81b5b9ffdd1a374e9eb0c053b5d2012155db9cbe76393a8585677b753bd5fdc1"}, ] [package.dependencies] @@ -1965,13 +2258,13 @@ files = [ [[package]] name = "tifffile" -version = "2023.9.26" +version = "2023.12.9" description = "Read and write TIFF files" optional = false python-versions = ">=3.9" files = [ - {file = "tifffile-2023.9.26-py3-none-any.whl", hash = 
"sha256:1de47fa945fddaade256e25ad4f375ae65547f3c1354063aded881c32a64cf89"}, - {file = "tifffile-2023.9.26.tar.gz", hash = "sha256:67e355e4595aab397f8405d04afe1b4ae7c6f62a44e22d933fee1a571a48c7ae"}, + {file = "tifffile-2023.12.9-py3-none-any.whl", hash = "sha256:9b066e4b1a900891ea42ffd33dab8ba34c537935618b9893ddef42d7d422692f"}, + {file = "tifffile-2023.12.9.tar.gz", hash = "sha256:9dd1da91180a6453018a241ff219e1905f169384355cd89c9ef4034c1b46cdb8"}, ] [package.dependencies] @@ -2013,13 +2306,13 @@ telegram = ["requests"] [[package]] name = "typing-extensions" -version = "4.8.0" +version = "4.9.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, - {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, ] [[package]] @@ -2071,13 +2364,13 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "wandb" -version = "0.16.0" +version = "0.16.1" description = "A CLI and library for interacting with the Weights & Biases API." optional = false python-versions = ">=3.7" files = [ - {file = "wandb-0.16.0-py3-none-any.whl", hash = "sha256:e103142a5ecdb158d29441c2bf9f935ae149ed562377f7cebffd2a6f7c9de949"}, - {file = "wandb-0.16.0.tar.gz", hash = "sha256:8d9875f1d8d75fee32dc51f6727bc277ce4f3869d7319ccf5f36ce596597402a"}, + {file = "wandb-0.16.1-py3-none-any.whl", hash = "sha256:1d7423f92520984585bae9693bb637ae08d3e0c1d75ad4b34215bc44431f114c"}, + {file = "wandb-0.16.1.tar.gz", hash = "sha256:ffe6e8dd8cc8fcd72010c1246fb3d6d226b37c4f111f3f94308a1c0ae28a2fec"}, ] [package.dependencies] @@ -2101,12 +2394,12 @@ typing-extensions = {version = "*", markers = "python_version < \"3.10\""} async = ["httpx (>=0.23.0)"] aws = ["boto3"] azure = ["azure-identity", "azure-storage-blob"] +core = ["wandb-core (>=0.17.0b2)"] gcp = ["google-cloud-storage"] kubeflow = ["google-cloud-storage", "kubernetes", "minio", "sh"] launch = ["PyYAML (>=6.0.0)", "awscli", "azure-containerregistry", "azure-identity", "azure-storage-blob", "boto3", "botocore", "chardet", "google-auth", "google-cloud-aiplatform", "google-cloud-artifact-registry", "google-cloud-compute", "google-cloud-storage", "iso8601", "kubernetes", "kubernetes-asyncio", "nbconvert", "nbformat", "optuna", "typing-extensions"] media = ["bokeh", "moviepy", "numpy", "pillow", "plotly", "rdkit-pypi", "soundfile"] models = ["cloudpickle"] -nexus = ["wandb-core (>=0.17.0b1)"] perf = ["orjson"] sweeps = ["sweeps (>=0.2.0)"] @@ -2245,4 +2538,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.12" -content-hash = "d0a398b3738e8aa3771631995f2eb161ac863807b20f7161852e323da11a9463" +content-hash = "c6a5c39be3bd546e1a659a64d0e358026b0e7936823f3aee9cdc728448f2833b" diff --git a/pyproject.toml b/pyproject.toml index 524fa9cb..b453d666 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,6 +17,7 @@ matplotlib = "^3.7.3" google-cloud-storage = "^2.10.0" tqdm = "^4.66.1" xxhash = "^3.4.1" +label-studio-sdk = 
"^0.0.32" #torch = {version="^2.1.0", source="pytorch"} #torchvision = {version="^0.16.0", source="pytorch"} From dd8a18d5a7cc4664ce0c2692fcac25aa8fd706cc Mon Sep 17 00:00:00 2001 From: Evening Date: Wed, 20 Dec 2023 14:07:15 +0800 Subject: [PATCH 04/35] Set default LABEL_STUDIO_API_KEY --- src/frdc/conf.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/frdc/conf.py b/src/frdc/conf.py index c94806fe..99fd2f87 100644 --- a/src/frdc/conf.py +++ b/src/frdc/conf.py @@ -7,6 +7,8 @@ import requests from google.cloud import storage as gcs +logger = logging.getLogger(__name__) + ROOT_DIR = Path(__file__).parents[2] LOCAL_DATASET_ROOT_DIR = ROOT_DIR / "rsc" os.environ["GOOGLE_CLOUD_PROJECT"] = "frmodel" @@ -14,9 +16,9 @@ GCS_BUCKET_NAME = "frdc-ds" GCS_CREDENTIALS = None LABEL_STUDIO_URL = "http://localhost:8080" -LABEL_STUDIO_API_KEY = os.environ["LABEL_STUDIO_API_KEY"] -logger = logging.getLogger(__name__) +if not (LABEL_STUDIO_API_KEY := os.environ.get("LABEL_STUDIO_API_KEY", None)): + logger.warning("LABEL_STUDIO_API_KEY not set") BAND_CONFIG = OrderedDict( { From e16480b1104e62c0fa3cde5ba0c6ae6710acaa40 Mon Sep 17 00:00:00 2001 From: Evening Date: Wed, 20 Dec 2023 14:17:28 +0800 Subject: [PATCH 05/35] Fix bad typing version --- src/frdc/load/label_studio.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/frdc/load/label_studio.py b/src/frdc/load/label_studio.py index 54a0fce1..b8287ff3 100644 --- a/src/frdc/load/label_studio.py +++ b/src/frdc/load/label_studio.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import logging from pathlib import Path from warnings import warn From 6a85fa7d38028eef25f273bc7280270dfe461080 Mon Sep 17 00:00:00 2001 From: Evening Date: Wed, 20 Dec 2023 14:58:25 +0800 Subject: [PATCH 06/35] Unignore shell files, add cml entrypoint --- .gitignore | 1 - cml.sh | 9 +++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 cml.sh diff --git a/.gitignore b/.gitignore index 4fcff576..e522579c 100644 --- a/.gitignore +++ b/.gitignore @@ -166,7 +166,6 @@ rsc/**/*.tif **/*/lightning_logs *.zip -*.sh *.ckpt /rsc **/wandb/ diff --git a/cml.sh b/cml.sh new file mode 100644 index 00000000..900fa3d1 --- /dev/null +++ b/cml.sh @@ -0,0 +1,9 @@ +set -a +source .env +set +a + +cml runner launch \ + --token=${GH_CML_TOKEN} \ + --labels="cml-gpu" \ + --idle-timeout="1h" --driver=github + From 8e71ad1753d7d634617ab402cb399913f6fc6a72 Mon Sep 17 00:00:00 2001 From: Evening Date: Wed, 20 Dec 2023 15:10:53 +0800 Subject: [PATCH 07/35] Update model-tests.yml --- .github/workflows/model-tests.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/model-tests.yml b/.github/workflows/model-tests.yml index c61342aa..36ff2052 100644 --- a/.github/workflows/model-tests.yml +++ b/.github/workflows/model-tests.yml @@ -63,6 +63,7 @@ jobs: # Do not do cd as it'll break PYTHONPATH. 
- name: Run Model Training run: | + sleep 1000 python3 -m tests.model_tests.chestnut_dec_may.train - name: Comment results via CML From 68effdba7192b505d830628dd80a552b95a3afd1 Mon Sep 17 00:00:00 2001 From: Evening Date: Wed, 20 Dec 2023 15:28:44 +0800 Subject: [PATCH 08/35] Update model-tests.yml --- .github/workflows/model-tests.yml | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/.github/workflows/model-tests.yml b/.github/workflows/model-tests.yml index 36ff2052..b4463e63 100644 --- a/.github/workflows/model-tests.yml +++ b/.github/workflows/model-tests.yml @@ -4,6 +4,12 @@ on: pull_request: branches: ['main'] workflow_dispatch: + inputs: + debug_enabled: + type: boolean + description: 'Run the build with tmate debugging enabled (https://github.com/marketplace/actions/debugging-with-tmate)' + required: false + default: false jobs: @@ -33,8 +39,8 @@ jobs: - name: Install via exported requirements.txt run: | - python -m pip install --upgrade pip - python -m pip install flake8 pytest poetry + python3 -m pip install --upgrade pip + python3 -m pip install flake8 pytest poetry poetry export --with dev --without-hashes -o requirements.txt pip3 install -r requirements.txt pip3 install torch torchvision torchaudio @@ -60,6 +66,11 @@ jobs: run: | echo "PYTHONPATH=src" >> $GITHUB_ENV + # Enable tmate debugging of manually-triggered workflows if the input option was provided + - name: Setup tmate session + uses: mxschmitt/action-tmate@v3 + if: ${{ github.event_name == 'workflow_dispatch' && inputs.debug_enabled }} + # Do not do cd as it'll break PYTHONPATH. - name: Run Model Training run: | From 046e14023cbafec82ec9acbc2fec692eceb6579e Mon Sep 17 00:00:00 2001 From: Evening Date: Wed, 20 Dec 2023 15:53:04 +0800 Subject: [PATCH 09/35] Attempt to fix tests not found --- .github/workflows/model-tests.yml | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/.github/workflows/model-tests.yml b/.github/workflows/model-tests.yml index b4463e63..ce864b3e 100644 --- a/.github/workflows/model-tests.yml +++ b/.github/workflows/model-tests.yml @@ -61,21 +61,20 @@ jobs: run: | echo "WANDB_API_KEY=${{ secrets.WANDB_API_KEY }}" >> $GITHUB_ENV - # Our project has src as a source path, explicitly add that in. - - name: Add src as PYTHONPATH - run: | - echo "PYTHONPATH=src" >> $GITHUB_ENV + - name: Add directories to PYTHONPATH + run: > + echo "PYTHONPATH=${{ github.workspace }}/src:\ + ${{ github.workspace }}/tests" >> $GITHUB_ENV # Enable tmate debugging of manually-triggered workflows if the input option was provided - name: Setup tmate session uses: mxschmitt/action-tmate@v3 if: ${{ github.event_name == 'workflow_dispatch' && inputs.debug_enabled }} - # Do not do cd as it'll break PYTHONPATH. 
- name: Run Model Training + working-directory: ${{ github.workspace }}/tests run: | - sleep 1000 - python3 -m tests.model_tests.chestnut_dec_may.train + python3 -m model_tests.chestnut_dec_may.train - name: Comment results via CML run: | From 79c3a99118b625b22a7ab39fd4e54ef946cb3f5e Mon Sep 17 00:00:00 2001 From: Evening Date: Wed, 20 Dec 2023 19:21:25 +0800 Subject: [PATCH 10/35] Attempt to connect to label studio in cml --- .github/workflows/model-tests.yml | 7 +++++-- src/frdc/conf.py | 2 +- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/workflows/model-tests.yml b/.github/workflows/model-tests.yml index ce864b3e..fb35d8f1 100644 --- a/.github/workflows/model-tests.yml +++ b/.github/workflows/model-tests.yml @@ -14,17 +14,19 @@ on: jobs: build: - runs-on: self-hosted container: image: docker://ghcr.io/iterative/cml:0-dvc2-base1-gpu volumes: + # This mounts and persists the venv between runs - /home/runner/work/frdc-ml/_github_home:/root + # This mounts the resources folder + - /home/runner/work/frdc-ml/_github_home/rsc:/__w/FRDC-ML/FRDC-ML/rsc env: # This is where setup-python will install and cache the venv AGENT_TOOLSDIRECTORY: "/root/venv" - options: --gpus all + options: --gpus all --network label-studio steps: - uses: actions/checkout@v3 @@ -74,6 +76,7 @@ jobs: - name: Run Model Training working-directory: ${{ github.workspace }}/tests run: | + git config --global --add safe.directory /__w/FRDC-ML/FRDC-ML' python3 -m model_tests.chestnut_dec_may.train - name: Comment results via CML diff --git a/src/frdc/conf.py b/src/frdc/conf.py index 99fd2f87..2e3c5127 100644 --- a/src/frdc/conf.py +++ b/src/frdc/conf.py @@ -15,7 +15,7 @@ GCS_PROJECT_ID = "frmodel" GCS_BUCKET_NAME = "frdc-ds" GCS_CREDENTIALS = None -LABEL_STUDIO_URL = "http://localhost:8080" +LABEL_STUDIO_URL = "http://label-studio:8080" if not (LABEL_STUDIO_API_KEY := os.environ.get("LABEL_STUDIO_API_KEY", None)): logger.warning("LABEL_STUDIO_API_KEY not set") From 4bb29e389bdb5b5286bdead068aec0936b52e072 Mon Sep 17 00:00:00 2001 From: Evening Date: Wed, 20 Dec 2023 20:14:06 +0800 Subject: [PATCH 11/35] Remove network line --- .github/workflows/model-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/model-tests.yml b/.github/workflows/model-tests.yml index fb35d8f1..628e2f5a 100644 --- a/.github/workflows/model-tests.yml +++ b/.github/workflows/model-tests.yml @@ -26,7 +26,7 @@ jobs: # This is where setup-python will install and cache the venv AGENT_TOOLSDIRECTORY: "/root/venv" - options: --gpus all --network label-studio + options: --gpus all steps: - uses: actions/checkout@v3 From 6084fbbdce074d2c094c732315d20c781196cb19 Mon Sep 17 00:00:00 2001 From: Evening Date: Wed, 20 Dec 2023 20:29:23 +0800 Subject: [PATCH 12/35] Update docker-compose.yml --- src/label-studio/docker-compose.yml | 143 +++++++++++++++------------- 1 file changed, 77 insertions(+), 66 deletions(-) diff --git a/src/label-studio/docker-compose.yml b/src/label-studio/docker-compose.yml index 2efb1d1d..4a6d909f 100644 --- a/src/label-studio/docker-compose.yml +++ b/src/label-studio/docker-compose.yml @@ -1,66 +1,77 @@ -version: "3.9" -services: - nginx: - build: . 
- image: heartexlabs/label-studio:latest - restart: unless-stopped - ports: - - "8080:8085" - - "8081:8086" - depends_on: - - app - environment: - - LABEL_STUDIO_HOST=${LABEL_STUDIO_HOST:-} - # Optional: Specify SSL termination certificate & key - # Just drop your cert.pem and cert.key into folder 'deploy/nginx/certs' - # - NGINX_SSL_CERT=/certs/cert.pem - # - NGINX_SSL_CERT_KEY=/certs/cert.key - volumes: - - ./mydata:/label-studio/data:rw - - ./deploy/nginx/certs:/certs:ro - # Optional: Override nginx default conf - # - ./deploy/my.conf:/etc/nginx/nginx.conf - command: nginx - - app: - stdin_open: true - tty: true - build: . - image: heartexlabs/label-studio:latest - restart: unless-stopped - expose: - - "8000" - depends_on: - - db - environment: - - DJANGO_DB=default - - POSTGRE_NAME=postgres - - POSTGRE_USER=postgres - - POSTGRE_PASSWORD= - - POSTGRE_PORT=5432 - - POSTGRE_HOST=db - - LABEL_STUDIO_HOST=${LABEL_STUDIO_HOST:-} - - JSON_LOG=1 - # - LOG_LEVEL=DEBUG - volumes: - - ./mydata:/label-studio/data:rw - command: label-studio-uwsgi - - db: - image: postgres:11.5 - hostname: db - restart: unless-stopped - # Optional: Enable TLS on PostgreSQL - # Just drop your server.crt and server.key into folder 'deploy/pgsql/certs' - # NOTE: Both files must have permissions u=rw (0600) or less - # command: > - # -c ssl=on - # -c ssl_cert_file=/var/lib/postgresql/certs/server.crt - # -c ssl_key_file=/var/lib/postgresql/certs/server.key - ports: - - "5432:5432" - environment: - - POSTGRES_HOST_AUTH_METHOD=trust - volumes: - - ${POSTGRES_DATA_DIR:-./postgres-data}:/var/lib/postgresql/data - - ./deploy/pgsql/certs:/var/lib/postgresql/certs:ro +version: "3.9" +services: + nginx: + build: . + image: heartexlabs/label-studio:latest + restart: unless-stopped + ports: + - "8080:8085" + - "8081:8086" + depends_on: + - app + environment: + - LABEL_STUDIO_HOST=${LABEL_STUDIO_HOST:-} + # Optional: Specify SSL termination certificate & key + # Just drop your cert.pem and cert.key into folder 'deploy/nginx/certs' + # - NGINX_SSL_CERT=/certs/cert.pem + # - NGINX_SSL_CERT_KEY=/certs/cert.key + volumes: + - ./mydata:/label-studio/data:rw + - ./deploy/nginx/certs:/certs:ro + # Optional: Override nginx default conf + # - ./deploy/my.conf:/etc/nginx/nginx.conf + command: nginx + networks: + - label-studio + + app: + stdin_open: true + tty: true + build: . 
+ image: heartexlabs/label-studio:latest + restart: unless-stopped + expose: + - "8000" + depends_on: + - db + environment: + - DJANGO_DB=default + - POSTGRE_NAME=postgres + - POSTGRE_USER=postgres + - POSTGRE_PASSWORD= + - POSTGRE_PORT=5432 + - POSTGRE_HOST=db + - LABEL_STUDIO_HOST=${LABEL_STUDIO_HOST:-} + - JSON_LOG=1 + # - LOG_LEVEL=DEBUG + volumes: + - ./mydata:/label-studio/data:rw + command: label-studio-uwsgi + networks: + - label-studio + + db: + image: postgres:11.5 + hostname: db + restart: unless-stopped + # Optional: Enable TLS on PostgreSQL + # Just drop your server.crt and server.key into folder 'deploy/pgsql/certs' + # NOTE: Both files must have permissions u=rw (0600) or less + # command: > + # -c ssl=on + # -c ssl_cert_file=/var/lib/postgresql/certs/server.crt + # -c ssl_key_file=/var/lib/postgresql/certs/server.key + ports: + - "5432:5432" + environment: + - POSTGRES_HOST_AUTH_METHOD=trust + volumes: + - ${POSTGRES_DATA_DIR:-./postgres-data}:/var/lib/postgresql/data + - ./deploy/pgsql/certs:/var/lib/postgresql/certs:ro + networks: + - label-studio + +networks: + label-studio: + name: label-studio +# driver: bridge From 9a7f4080af3f27053a9c72d787730daba06764e0 Mon Sep 17 00:00:00 2001 From: Evening Date: Wed, 20 Dec 2023 20:39:48 +0800 Subject: [PATCH 13/35] Force owner --- .github/workflows/model-tests.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/model-tests.yml b/.github/workflows/model-tests.yml index 628e2f5a..84c656c7 100644 --- a/.github/workflows/model-tests.yml +++ b/.github/workflows/model-tests.yml @@ -28,11 +28,10 @@ jobs: options: --gpus all steps: - - uses: actions/checkout@v3 - - name: Force change owner run: | chown -R root: ~ + - uses: actions/checkout@v3 - name: Set up Python 3.11 uses: actions/setup-python@v4 From 2ebc298ee70257c3d5d5de3a884bdec0fe644c1b Mon Sep 17 00:00:00 2001 From: Evening Date: Wed, 20 Dec 2023 20:41:20 +0800 Subject: [PATCH 14/35] don't mount rsc --- .github/workflows/model-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/model-tests.yml b/.github/workflows/model-tests.yml index 84c656c7..033bf341 100644 --- a/.github/workflows/model-tests.yml +++ b/.github/workflows/model-tests.yml @@ -21,7 +21,7 @@ jobs: # This mounts and persists the venv between runs - /home/runner/work/frdc-ml/_github_home:/root # This mounts the resources folder - - /home/runner/work/frdc-ml/_github_home/rsc:/__w/FRDC-ML/FRDC-ML/rsc +# - /home/runner/work/frdc-ml/_github_home/rsc:/__w/FRDC-ML/FRDC-ML/rsc env: # This is where setup-python will install and cache the venv AGENT_TOOLSDIRECTORY: "/root/venv" From 4b13ce7d69681d054642fc699c987f6f3c59a2b3 Mon Sep 17 00:00:00 2001 From: Evening Date: Wed, 20 Dec 2023 21:00:04 +0800 Subject: [PATCH 15/35] Attempt to get label-studio via docker host --- .github/workflows/model-tests.yml | 6 +- src/frdc/conf.py | 151 +++++++++++++++--------------- 2 files changed, 81 insertions(+), 76 deletions(-) diff --git a/.github/workflows/model-tests.yml b/.github/workflows/model-tests.yml index 033bf341..06bb253a 100644 --- a/.github/workflows/model-tests.yml +++ b/.github/workflows/model-tests.yml @@ -21,11 +21,15 @@ jobs: # This mounts and persists the venv between runs - /home/runner/work/frdc-ml/_github_home:/root # This mounts the resources folder -# - /home/runner/work/frdc-ml/_github_home/rsc:/__w/FRDC-ML/FRDC-ML/rsc + # - /home/runner/work/frdc-ml/_github_home/rsc:/__w/FRDC-ML/FRDC-ML/rsc env: # This is where setup-python will install 
and cache the venv AGENT_TOOLSDIRECTORY: "/root/venv" + # This uses the host's exposed services + LABEL_STUDIO_HOST: "host.docker.internal" + LABEL_STUDIO_API_KEY: "${{ secrets.LABEL_STUDIO_API_KEY }}" + options: --gpus all steps: - name: Force change owner diff --git a/src/frdc/conf.py b/src/frdc/conf.py index 2e3c5127..89986988 100644 --- a/src/frdc/conf.py +++ b/src/frdc/conf.py @@ -1,75 +1,76 @@ -import logging -import os -from collections import OrderedDict -from pathlib import Path - -import label_studio_sdk as label_studio -import requests -from google.cloud import storage as gcs - -logger = logging.getLogger(__name__) - -ROOT_DIR = Path(__file__).parents[2] -LOCAL_DATASET_ROOT_DIR = ROOT_DIR / "rsc" -os.environ["GOOGLE_CLOUD_PROJECT"] = "frmodel" -GCS_PROJECT_ID = "frmodel" -GCS_BUCKET_NAME = "frdc-ds" -GCS_CREDENTIALS = None -LABEL_STUDIO_URL = "http://label-studio:8080" - -if not (LABEL_STUDIO_API_KEY := os.environ.get("LABEL_STUDIO_API_KEY", None)): - logger.warning("LABEL_STUDIO_API_KEY not set") - -BAND_CONFIG = OrderedDict( - { - "WB": ("*result.tif", lambda x: x[..., 2:3]), - "WG": ("*result.tif", lambda x: x[..., 1:2]), - "WR": ("*result.tif", lambda x: x[..., 0:1]), - "NB": ("result_Blue.tif", lambda x: x), - "NG": ("result_Green.tif", lambda x: x), - "NR": ("result_Red.tif", lambda x: x), - "RE": ("result_RedEdge.tif", lambda x: x), - "NIR": ("result_NIR.tif", lambda x: x), - } -) - -BAND_MAX_CONFIG: dict[str, tuple[int, int]] = { - "WR": (0, 2**8), - "WG": (0, 2**8), - "WB": (0, 2**8), - "NR": (0, 2**14), - "NG": (0, 2**14), - "NB": (0, 2**14), - "RE": (0, 2**14), - "NIR": (0, 2**14), -} - -try: - logger.info("Connecting to GCS...") - GCS_CLIENT = gcs.Client( - project=GCS_PROJECT_ID, - credentials=GCS_CREDENTIALS, - ) - GCS_BUCKET = GCS_CLIENT.bucket(GCS_BUCKET_NAME) - logger.info("Connected to GCS.") -except Exception as e: - logger.warning( - "Could not connect to GCS. Will not be able to download files. " - "GCS_CLIENT will be None." - ) - GCS_CLIENT = None - -try: - logger.info("Connecting to Label Studio...") - requests.get(LABEL_STUDIO_URL) - LABEL_STUDIO_CLIENT = label_studio.Client( - url=LABEL_STUDIO_URL, - api_key=LABEL_STUDIO_API_KEY, - ) - logger.info("Connected to Label Studio.") -except requests.exceptions.ConnectionError: - logger.warning( - f"Could not connect to Label Studio at {LABEL_STUDIO_URL}. " - f"LABEL_STUDIO_CLIENT will be None." 
- ) - LABEL_STUDIO_CLIENT = None +import logging +import os +from collections import OrderedDict +from pathlib import Path + +import label_studio_sdk as label_studio +import requests +from google.cloud import storage as gcs + +logger = logging.getLogger(__name__) + +ROOT_DIR = Path(__file__).parents[2] +LOCAL_DATASET_ROOT_DIR = ROOT_DIR / "rsc" +os.environ["GOOGLE_CLOUD_PROJECT"] = "frmodel" +GCS_PROJECT_ID = "frmodel" +GCS_BUCKET_NAME = "frdc-ds" +GCS_CREDENTIALS = None +LABEL_STUDIO_HOST = os.environ.get("LABEL_STUDIO_HOST", "localhost") +LABEL_STUDIO_URL = f"http://{LABEL_STUDIO_HOST}:8080" + +if not (LABEL_STUDIO_API_KEY := os.environ.get("LABEL_STUDIO_API_KEY", None)): + logger.warning("LABEL_STUDIO_API_KEY not set") + +BAND_CONFIG = OrderedDict( + { + "WB": ("*result.tif", lambda x: x[..., 2:3]), + "WG": ("*result.tif", lambda x: x[..., 1:2]), + "WR": ("*result.tif", lambda x: x[..., 0:1]), + "NB": ("result_Blue.tif", lambda x: x), + "NG": ("result_Green.tif", lambda x: x), + "NR": ("result_Red.tif", lambda x: x), + "RE": ("result_RedEdge.tif", lambda x: x), + "NIR": ("result_NIR.tif", lambda x: x), + } +) + +BAND_MAX_CONFIG: dict[str, tuple[int, int]] = { + "WR": (0, 2**8), + "WG": (0, 2**8), + "WB": (0, 2**8), + "NR": (0, 2**14), + "NG": (0, 2**14), + "NB": (0, 2**14), + "RE": (0, 2**14), + "NIR": (0, 2**14), +} + +try: + logger.info("Connecting to GCS...") + GCS_CLIENT = gcs.Client( + project=GCS_PROJECT_ID, + credentials=GCS_CREDENTIALS, + ) + GCS_BUCKET = GCS_CLIENT.bucket(GCS_BUCKET_NAME) + logger.info("Connected to GCS.") +except Exception as e: + logger.warning( + "Could not connect to GCS. Will not be able to download files. " + "GCS_CLIENT will be None." + ) + GCS_CLIENT = None + +try: + logger.info("Connecting to Label Studio...") + requests.get(LABEL_STUDIO_URL) + LABEL_STUDIO_CLIENT = label_studio.Client( + url=LABEL_STUDIO_URL, + api_key=LABEL_STUDIO_API_KEY, + ) + logger.info("Connected to Label Studio.") +except requests.exceptions.ConnectionError: + logger.warning( + f"Could not connect to Label Studio at {LABEL_STUDIO_URL}. " + f"LABEL_STUDIO_CLIENT will be None." + ) + LABEL_STUDIO_CLIENT = None From ede2a5c9cd4c65fb1b8843dc5e6998f882394bf4 Mon Sep 17 00:00:00 2001 From: Evening Date: Wed, 20 Dec 2023 21:03:26 +0800 Subject: [PATCH 16/35] Update conf and compose --- .dvcignore | 3 --- 1 file changed, 3 deletions(-) delete mode 100644 .dvcignore diff --git a/.dvcignore b/.dvcignore deleted file mode 100644 index 51973055..00000000 --- a/.dvcignore +++ /dev/null @@ -1,3 +0,0 @@ -# Add patterns of files dvc should ignore, which could improve -# the performance. 
Learn more at -# https://dvc.org/doc/user-guide/dvcignore From 2d1efdd206c9523846b9e83243297ce6adca1909 Mon Sep 17 00:00:00 2001 From: Evening Date: Wed, 20 Dec 2023 21:05:10 +0800 Subject: [PATCH 17/35] Fix incorrect git config --- .github/workflows/model-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/model-tests.yml b/.github/workflows/model-tests.yml index 06bb253a..93ee9e14 100644 --- a/.github/workflows/model-tests.yml +++ b/.github/workflows/model-tests.yml @@ -79,7 +79,7 @@ jobs: - name: Run Model Training working-directory: ${{ github.workspace }}/tests run: | - git config --global --add safe.directory /__w/FRDC-ML/FRDC-ML' + git config --global --add safe.directory /__w/FRDC-ML/FRDC-ML python3 -m model_tests.chestnut_dec_may.train - name: Comment results via CML From b55afdd096ba315d5de2b0f37cf183bfc669e1a0 Mon Sep 17 00:00:00 2001 From: Evening Date: Wed, 20 Dec 2023 21:15:15 +0800 Subject: [PATCH 18/35] Add a check for Label Studio server up --- .github/workflows/model-tests.yml | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/.github/workflows/model-tests.yml b/.github/workflows/model-tests.yml index 93ee9e14..feb1ffba 100644 --- a/.github/workflows/model-tests.yml +++ b/.github/workflows/model-tests.yml @@ -2,7 +2,7 @@ name: Model Training on: pull_request: - branches: ['main'] + branches: [ 'main' ] workflow_dispatch: inputs: debug_enabled: @@ -32,10 +32,15 @@ jobs: options: --gpus all steps: + - uses: actions/checkout@v3 + - name: Force change owner run: | chown -R root: ~ - - uses: actions/checkout@v3 + + - name: Check if Label Studio Server is up + run: | + curl --fail --silent --head http://${{ env.LABEL_STUDIO_HOST }} || exit 1 - name: Set up Python 3.11 uses: actions/setup-python@v4 From 2de41eb63c59f0ef07e41ccb60aec92204c28f64 Mon Sep 17 00:00:00 2001 From: Evening Date: Wed, 20 Dec 2023 21:21:19 +0800 Subject: [PATCH 19/35] Improve formatting for report --- tests/model_tests/chestnut_dec_may/train.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/model_tests/chestnut_dec_may/train.py b/tests/model_tests/chestnut_dec_may/train.py index 299eb708..9c2b3c96 100644 --- a/tests/model_tests/chestnut_dec_may/train.py +++ b/tests/model_tests/chestnut_dec_may/train.py @@ -114,8 +114,8 @@ def main( with open(Path(__file__).parent / "report.md", "w") as f: f.write( - f"# Chestnut Nature Park (Dec 2020 vs May 2021)" - f"[WandB Report]({run.get_url()})" + f"# Chestnut Nature Park (Dec 2020 vs May 2021)\n" + f"- Results: [WandB Report]({run.get_url()})" ) fig, acc = evaluate( From a4f00c2a9d9a859c82d43f50c9d56c5b4575723a Mon Sep 17 00:00:00 2001 From: Evening Date: Wed, 20 Dec 2023 21:21:28 +0800 Subject: [PATCH 20/35] Fix issue with env substitution --- .github/workflows/model-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/model-tests.yml b/.github/workflows/model-tests.yml index feb1ffba..ca734adb 100644 --- a/.github/workflows/model-tests.yml +++ b/.github/workflows/model-tests.yml @@ -40,7 +40,7 @@ jobs: - name: Check if Label Studio Server is up run: | - curl --fail --silent --head http://${{ env.LABEL_STUDIO_HOST }} || exit 1 + curl --fail --silent --head http://host.docker.internal:8080 || exit 1 - name: Set up Python 3.11 uses: actions/setup-python@v4 From bf92c2c1ccfc8b072ab6d943149c6f9b20a24a45 Mon Sep 17 00:00:00 2001 From: Evening Date: Tue, 26 Dec 2023 14:19:48 +0800 Subject: [PATCH 21/35] Remove unused evaluate script --- 
src/frdc/evaluate/__init__.py | 2 -- src/frdc/evaluate/evaluate.py | 31 ------------------------------- 2 files changed, 33 deletions(-) delete mode 100644 src/frdc/evaluate/evaluate.py diff --git a/src/frdc/evaluate/__init__.py b/src/frdc/evaluate/__init__.py index 48354a4b..8b137891 100644 --- a/src/frdc/evaluate/__init__.py +++ b/src/frdc/evaluate/__init__.py @@ -1,3 +1 @@ -from .evaluate import dummy_evaluate -__all__ = ["dummy_evaluate"] diff --git a/src/frdc/evaluate/evaluate.py b/src/frdc/evaluate/evaluate.py deleted file mode 100644 index a0ccfbed..00000000 --- a/src/frdc/evaluate/evaluate.py +++ /dev/null @@ -1,31 +0,0 @@ -from typing import Callable - -import numpy as np -from sklearn.base import ClassifierMixin - - -def dummy_evaluate( - *, - feature_extraction: Callable[[np.ndarray], np.ndarray], - classifier: ClassifierMixin, - X_test: np.ndarray, - y_test: np.ndarray, -) -> float: - """Dummy Evaluation function. - - Notes: - This is obviously not final. This is just a placeholder to get the - pipeline working. - - Args: - feature_extraction: The feature extraction function. - classifier: The classifier. - X_test: X_test is the test image numpy array of shape (N, H, W, C). - y_test: y_test is the test class label a numpy array of shape (N,). - - Returns: - The score of the model. - """ - # TODO: Replace this with how the model scores - - return classifier.score(feature_extraction(X_test), y_test) From 77ba78a33bed09b7bf4051e889795634311cda20 Mon Sep 17 00:00:00 2001 From: Evening Date: Tue, 26 Dec 2023 14:20:44 +0800 Subject: [PATCH 22/35] Make GCS error clearer --- src/frdc/conf.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/frdc/conf.py b/src/frdc/conf.py index 89986988..5098025d 100644 --- a/src/frdc/conf.py +++ b/src/frdc/conf.py @@ -56,6 +56,8 @@ except Exception as e: logger.warning( "Could not connect to GCS. Will not be able to download files. " + "Check that you've (1) Installed the GCS CLI and (2) Set up the" + "ADC with `gcloud auth application-default login`. " "GCS_CLIENT will be None." ) GCS_CLIENT = None From 5c4a36cfbd672593293dc07ebe1fd30c9f762f63 Mon Sep 17 00:00:00 2001 From: Evening Date: Tue, 26 Dec 2023 14:20:51 +0800 Subject: [PATCH 23/35] Fix missing default on exception --- src/frdc/conf.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/frdc/conf.py b/src/frdc/conf.py index 5098025d..e2f5958a 100644 --- a/src/frdc/conf.py +++ b/src/frdc/conf.py @@ -61,6 +61,7 @@ "GCS_CLIENT will be None." 
) GCS_CLIENT = None + GCS_BUCKET = None try: logger.info("Connecting to Label Studio...") From d46f4e3fb56ae6b5becd2ba81389fd9dd3119e2b Mon Sep 17 00:00:00 2001 From: Evening Date: Tue, 26 Dec 2023 14:21:03 +0800 Subject: [PATCH 24/35] Add dev container spec --- .devcontainer/devcontainer.json | 16 ++++++++++++++++ Dockerfile | 19 +++++++++++++++++++ 2 files changed, 35 insertions(+) create mode 100644 .devcontainer/devcontainer.json create mode 100644 Dockerfile diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 00000000..9b0313b8 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,16 @@ +{ + "name": "frdc", + "build": { + "context": "../", + "dockerfile": "Dockerfile", + }, + "containerEnv": { + "LABEL_STUDIO_HOST": "host.docker.internal", + }, + "runArgs": [ + "--gpus=all", + ], + "hostRequirements": { + "gpu": true, + } +} \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..e1378f66 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,19 @@ +FROM pytorch/pytorch:2.1.2-cuda12.1-cudnn8-runtime as torch +WORKDIR /devcontainer + +COPY ./pyproject.toml /devcontainer/pyproject.toml + +RUN apt-get update +RUN apt-get install git -y + +RUN pip3 install --upgrade pip && \ + pip3 install poetry + +RUN conda init bash \ + && . ~/.bashrc \ + && conda activate base \ + && poetry config virtualenvs.create false \ + && poetry install --with dev --no-interaction --no-ansi + +RUN apt-get install curl -y && curl -sSL https://sdk.cloud.google.com | bash +ENV PATH $PATH:/root/google-cloud-sdk/bin \ No newline at end of file From c2ba141e24d02c47d88691ca199087c63a553c4b Mon Sep 17 00:00:00 2001 From: Evening Date: Tue, 26 Dec 2023 14:22:54 +0800 Subject: [PATCH 25/35] Delete rsc.dvc --- rsc.dvc | 424 -------------------------------------------------------- 1 file changed, 424 deletions(-) delete mode 100644 rsc.dvc diff --git a/rsc.dvc b/rsc.dvc deleted file mode 100644 index f9ae0e31..00000000 --- a/rsc.dvc +++ /dev/null @@ -1,424 +0,0 @@ -outs: -- hash: md5 - path: rsc - files: - - relpath: DEBUG/0/bounds.csv - md5: ad253d8dfe0cbbc8107647b11d9a2ce2 - size: 325 - cloud: - frdc-ds: - etag: 08b6ab869c8de682031001 - version_id: '1701154195084726' - - relpath: DEBUG/0/result.jpg - md5: 36e390821a45f70fc442bbd5d881f495 - size: 7361 - cloud: - frdc-ds: - etag: 08ccc6cc8191e682031001 - version_id: '1701155213353804' - - relpath: DEBUG/0/result.tif - md5: c700d23b1332113e74051d0b0d187d6e - size: 60154 - cloud: - frdc-ds: - etag: 0893da859c8de682031001 - version_id: '1701154195074323' - - relpath: DEBUG/0/result_Blue.tif - md5: 663cb540356deb8811d0b779e4435bf5 - size: 60134 - cloud: - frdc-ds: - etag: 08fada859c8de682031001 - version_id: '1701154195074426' - - relpath: DEBUG/0/result_Green.tif - md5: 2643f64a2e775d4ee95583e108ab5e9c - size: 60134 - cloud: - frdc-ds: - etag: 08bc99869c8de682031001 - version_id: '1701154195082428' - - relpath: DEBUG/0/result_NIR.tif - md5: 946c2eedf7e658fd9924b0a99b3282fa - size: 60134 - cloud: - frdc-ds: - etag: 08879c869c8de682031001 - version_id: '1701154195082759' - - relpath: DEBUG/0/result_Red.tif - md5: f566accc812ae46a0147b0078d0f4411 - size: 60134 - cloud: - frdc-ds: - etag: 0884fa869c8de682031001 - version_id: '1701154195094788' - - relpath: DEBUG/0/result_RedEdge.tif - md5: ab998dba488a0fd7c362fab04c1b4034 - size: 60134 - cloud: - frdc-ds: - etag: 08a98c869c8de682031001 - version_id: '1701154195080745' - - relpath: casuarina/20220418/183deg/dsm.tif - md5: 
43260454e85e6f5795a5c17213de3334 - size: 36290602 - cloud: - frdc-ds: - etag: 08fd818aa08de682031001 - version_id: '1701154203533565' - - relpath: casuarina/20220418/183deg/gsddsm.tif - md5: f67e3c112989145af0cc0b5a0478fa22 - size: 98938 - cloud: - frdc-ds: - etag: 08ceb6879c8de682031001 - version_id: '1701154195102542' - - relpath: casuarina/20220418/183deg/result.jpg - md5: ceae7eab3f8a39485e3b4f5c5b7d47b3 - size: 13379694 - cloud: - frdc-ds: - etag: 08be83e98191e682031001 - version_id: '1701155213820350' - - relpath: casuarina/20220418/183deg/result.tif - md5: e355405f060d55a6b7374b3542fdc3c1 - size: 197880775 - cloud: - frdc-ds: - etag: 08f2c3ebaf8de682031001 - version_id: '1701154236588530' - - relpath: casuarina/20220418/183deg/result_Blue.tif - md5: 03bf7028f6527db900b95d7621463cce - size: 223640924 - cloud: - frdc-ds: - etag: 08b4bf9eac8de682031001 - version_id: '1701154229034932' - - relpath: casuarina/20220418/183deg/result_Green.tif - md5: d475aef93075d7375f38f73895d5fdb8 - size: 226577241 - cloud: - frdc-ds: - etag: 088b9682ac8de682031001 - version_id: '1701154228570891' - - relpath: casuarina/20220418/183deg/result_NIR.tif - md5: 2c9cc4d48cf9316dea3a6f536fe5a3ac - size: 222016430 - cloud: - frdc-ds: - etag: 089be4c1ab8de682031001 - version_id: '1701154227515931' - - relpath: casuarina/20220418/183deg/result_Red.tif - md5: 73977a9d21760bbbcc8b42f79bcb4692 - size: 226754875 - cloud: - frdc-ds: - etag: 08e99287ac8de682031001 - version_id: '1701154228652393' - - relpath: casuarina/20220418/183deg/result_RedEdge.tif - md5: 3f04518f4d8a35e2efdc3c3d6187860f - size: 223730363 - cloud: - frdc-ds: - etag: 08ffbea1ac8de682031001 - version_id: '1701154229084031' - - relpath: casuarina/20220418/183deg/segment.tif - md5: 830f7ca57c34ac3a0eb6a852cb272373 - size: 26708 - cloud: - frdc-ds: - etag: 08ccdd859c8de682031001 - version_id: '1701154195074764' - - relpath: casuarina/20220418/93deg/dsm.tif - md5: 5fbd6841a0b7426bd01d4a3b68d06a9c - size: 36780878 - cloud: - frdc-ds: - etag: 08f8f3c9a08de682031001 - version_id: '1701154204580344' - - relpath: casuarina/20220418/93deg/gsddsm.tif - md5: ac659a3f17b2e21dc852a04af246906b - size: 91346 - cloud: - frdc-ds: - etag: 08c798869c8de682031001 - version_id: '1701154195082311' - - relpath: casuarina/20220418/93deg/result.jpg - md5: e0f493e7704cf377b4f41132313950d1 - size: 13131131 - cloud: - frdc-ds: - etag: 08bffae48191e682031001 - version_id: '1701155213753663' - - relpath: casuarina/20220418/93deg/result.tif - md5: 83feb031787b457f5017db9acdebf625 - size: 195394032 - cloud: - frdc-ds: - etag: 088df3e1aa8de682031001 - version_id: '1701154225944973' - - relpath: casuarina/20220418/93deg/result_Blue.tif - md5: d97ce2b5a44e896810e6401b4f5ab506 - size: 220670499 - cloud: - frdc-ds: - etag: 08d5d5b0b28de682031001 - version_id: '1701154241915605' - - relpath: casuarina/20220418/93deg/result_Green.tif - md5: 40ef3a8c81ba28366ba47a1fa4989702 - size: 223797748 - cloud: - frdc-ds: - etag: 08d3ec8cac8de682031001 - version_id: '1701154228745811' - - relpath: casuarina/20220418/93deg/result_NIR.tif - md5: a2ce084c7ba1c5f5a6b0b4a1dc93a98c - size: 219316999 - cloud: - frdc-ds: - etag: 08f4b0fdab8de682031001 - version_id: '1701154228492404' - - relpath: casuarina/20220418/93deg/result_Red.tif - md5: 65144fb2a827f6dd1723538afd375902 - size: 224060322 - cloud: - frdc-ds: - etag: 0885ce9fac8de682031001 - version_id: '1701154229053189' - - relpath: casuarina/20220418/93deg/result_RedEdge.tif - md5: ceda32971f11a11af00a4f1838475e16 - size: 221188044 - cloud: - frdc-ds: 
- etag: 08f8dceaab8de682031001 - version_id: '1701154228186744' - - relpath: casuarina/20220418/93deg/segment.tif - md5: ea46f1c799137aa0fd0256b20c5a66d6 - size: 25383 - cloud: - frdc-ds: - etag: 0881da859c8de682031001 - version_id: '1701154195074305' - - relpath: chestnut_nature_park/20201218/bounds.csv - md5: d7043890368db04693865c2451c35a1f - size: 1408 - cloud: - frdc-ds: - etag: 08e98f869c8de682031001 - version_id: '1701154195081193' - - relpath: chestnut_nature_park/20201218/dsm.tif - md5: fbf608de495c51dace8bda86cadb4b65 - size: 14482537 - cloud: - frdc-ds: - etag: 08b8a1c79c8de682031001 - version_id: '1701154196148408' - - relpath: chestnut_nature_park/20201218/result.jpg - md5: 91c1cff8671c332b20e3ca128431d6d7 - size: 4780586 - cloud: - frdc-ds: - etag: 08ddf0c49c8de682031001 - version_id: '1701154196109405' - - relpath: chestnut_nature_park/20201218/result.tif - md5: d08c5b89a54b24ae85dd5d15e2a462fb - size: 80415617 - cloud: - frdc-ds: - etag: 08fafec5a58de682031001 - version_id: '1701154215001978' - - relpath: chestnut_nature_park/20201218/result_Blue.tif - md5: ad5eacd90476ad6a56111009f958a30b - size: 95205235 - cloud: - frdc-ds: - etag: 08cebef79b8de682031001 - version_id: '1701154194841422' - - relpath: chestnut_nature_park/20201218/result_Green.tif - md5: 5c85ec286f81edecaa5f2978086a6f79 - size: 96849701 - cloud: - frdc-ds: - etag: 08d2eff6a08de682031001 - version_id: '1701154205317074' - - relpath: chestnut_nature_park/20201218/result_NIR.tif - md5: fa7df3dece555c289aea03c2c996195d - size: 94504684 - cloud: - frdc-ds: - etag: 08b180c2a68de682031001 - version_id: '1701154217033777' - - relpath: chestnut_nature_park/20201218/result_Red.tif - md5: 09b07ef8678e8f4ce058fd091760eab6 - size: 96915726 - cloud: - frdc-ds: - etag: 088cd0d0a18de682031001 - version_id: '1701154206787596' - - relpath: chestnut_nature_park/20201218/result_RedEdge.tif - md5: 097c3c39629534dcdcb8f0b645ad9736 - size: 95540119 - cloud: - frdc-ds: - etag: 08ffa289a78de682031001 - version_id: '1701154218201471' - - relpath: chestnut_nature_park/20210510/90deg43m85pct255deg/bounds.csv - md5: 58f03982df33a8b52c61e51daf6b450b - size: 1452 - cloud: - frdc-ds: - etag: 08b2b8859c8de682031001 - version_id: '1701154195070002' - - relpath: chestnut_nature_park/20210510/90deg43m85pct255deg/dsm.tif - md5: 041257e74edb3d9719debc3d9fe7d4dd - size: 20773786 - cloud: - frdc-ds: - etag: 08f7b2809f8de682031001 - version_id: '1701154201278839' - - relpath: chestnut_nature_park/20210510/90deg43m85pct255deg/gsddsm.tif - md5: 45b28f2f3735de83d65aea19e2697676 - size: 45878 - cloud: - frdc-ds: - etag: 08f691869c8de682031001 - version_id: '1701154195081462' - - relpath: chestnut_nature_park/20210510/90deg43m85pct255deg/result.jpg - md5: d012b232cc1faed2871ebf5150d20cc1 - size: 6941116 - cloud: - frdc-ds: - etag: 08cfbae38191e682031001 - version_id: '1701155213729103' - - relpath: chestnut_nature_park/20210510/90deg43m85pct255deg/result.tif - md5: 43541fa1f15171ba8548fb9b6ae77cfe - size: 103859274 - cloud: - frdc-ds: - etag: 089b99c3a78de682031001 - version_id: '1701154219150491' - - relpath: chestnut_nature_park/20210510/90deg43m85pct255deg/result_Blue.tif - md5: 87bef8f7bff639951ffa452f6ef9bc3f - size: 117714429 - cloud: - frdc-ds: - etag: 08d8b18ca88de682031001 - version_id: '1701154220349656' - - relpath: chestnut_nature_park/20210510/90deg43m85pct255deg/result_Green.tif - md5: c01de4a50b5caa01ace693e5191b3870 - size: 119925274 - cloud: - frdc-ds: - etag: 08bdfcbaa78de682031001 - version_id: '1701154219015741' - - relpath: 
chestnut_nature_park/20210510/90deg43m85pct255deg/result_NIR.tif - md5: 2d842ef0d2f43e5269c1fc456aeb746d - size: 117211078 - cloud: - frdc-ds: - etag: 08fb9adaa78de682031001 - version_id: '1701154219527547' - - relpath: chestnut_nature_park/20210510/90deg43m85pct255deg/result_Red.tif - md5: 214d97f9eacf93ce29c6898858b7f146 - size: 119913392 - cloud: - frdc-ds: - etag: 08e5ecdda78de682031001 - version_id: '1701154219587173' - - relpath: chestnut_nature_park/20210510/90deg43m85pct255deg/result_RedEdge.tif - md5: b45f71ffe2c14280100e22c23ba871ab - size: 118263239 - cloud: - frdc-ds: - etag: 089798e6a88de682031001 - version_id: '1701154221820951' - - relpath: chestnut_nature_park/20210510/90deg43m85pct255deg/segment.tif - md5: 86191f2ab6e7374ce98c83491ce43527 - size: 10021 - cloud: - frdc-ds: - etag: 08b5e5859c8de682031001 - version_id: '1701154195075765' - - relpath: chestnut_nature_park/20210510/90deg60m84.5pct255deg/dsm.tif - md5: 99fa34f9ca8dc66d95b9b2fb144b334f - size: 10857218 - cloud: - frdc-ds: - etag: 08dc84e09d8de682031001 - version_id: '1701154198651484' - - relpath: chestnut_nature_park/20210510/90deg60m84.5pct255deg/gsddsm.tif - md5: 09f4143d1083c743f6a2bbed2dba5d81 - size: 55570 - cloud: - frdc-ds: - etag: 089e95869c8de682031001 - version_id: '1701154195081886' - - relpath: chestnut_nature_park/20210510/90deg60m84.5pct255deg/result.jpg - md5: cfc21b5bf466b57e888733f37d922017 - size: 3243560 - cloud: - frdc-ds: - etag: 08edc4db8191e682031001 - version_id: '1701155213599341' - - relpath: chestnut_nature_park/20210510/90deg60m84.5pct255deg/result.tif - md5: 6b99351bd16f05fafc2f96e2945ec094 - size: 51407149 - cloud: - frdc-ds: - etag: 08fca18da48de682031001 - version_id: '1701154211975420' - - relpath: chestnut_nature_park/20210510/90deg60m84.5pct255deg/result_Blue.tif - md5: c240b4e607af2cdadc3d4fc96181e005 - size: 59362235 - cloud: - frdc-ds: - etag: 08b3d484a58de682031001 - version_id: '1701154213931571' - - relpath: chestnut_nature_park/20210510/90deg60m84.5pct255deg/result_Green.tif - md5: 08d7b7e3e4823b0cf9c53692d6b55f5b - size: 60380606 - cloud: - frdc-ds: - etag: 08ebe3a2a58de682031001 - version_id: '1701154214425067' - - relpath: chestnut_nature_park/20210510/90deg60m84.5pct255deg/result_NIR.tif - md5: ab3509fac188fe452d01957237181f31 - size: 59034799 - cloud: - frdc-ds: - etag: 0893bcfca48de682031001 - version_id: '1701154213797395' - - relpath: chestnut_nature_park/20210510/90deg60m84.5pct255deg/result_Red.tif - md5: 6b0531431f0d898aacda6334c10fca8a - size: 60369184 - cloud: - frdc-ds: - etag: 08e8a58ea58de682031001 - version_id: '1701154214089448' - - relpath: chestnut_nature_park/20210510/90deg60m84.5pct255deg/result_RedEdge.tif - md5: 75e67bc7c1b01e060c68062354192b8e - size: 59567305 - cloud: - frdc-ds: - etag: 08f0dc91ad8de682031001 - version_id: '1701154230922864' - - relpath: chestnut_nature_park/20210510/90deg60m84.5pct255deg/segment.tif - md5: 2b5856c859b44e54517b7891fd53bd60 - size: 12784 - cloud: - frdc-ds: - etag: 08838e869c8de682031001 - version_id: '1701154195080963' - - relpath: chestnut_nature_park/20210510/Join/adding -90deg 60m data/cloud.las - md5: a4755ad17e0c0f2686a1fb9cacc712b7 - size: 74248398 - cloud: - frdc-ds: - etag: 0892daa1a48de682031001 - version_id: '1701154212310290' - - relpath: chestnut_nature_park/20210510/Join/cloud.las - md5: 8ce9bccccab90a8a5c17ad5b3a982f0d - size: 61187376 - cloud: - frdc-ds: - etag: 08fda5a8a58de682031001 - version_id: '1701154214515453' From 276fa174e0dcac7def581b0e715c97dcc875f760 Mon Sep 17 00:00:00 2001 From: 
Evening Date: Tue, 26 Dec 2023 14:41:50 +0800 Subject: [PATCH 26/35] Get api key from host --- .devcontainer/devcontainer.json | 1 + 1 file changed, 1 insertion(+) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 9b0313b8..ca281b81 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -6,6 +6,7 @@ }, "containerEnv": { "LABEL_STUDIO_HOST": "host.docker.internal", + "LABEL_STUDIO_API_KEY": "${localEnv:LABEL_STUDIO_API_KEY}", }, "runArgs": [ "--gpus=all", From 70b275ebb3a9b9bed4559a851e437d306c0226f6 Mon Sep 17 00:00:00 2001 From: Evening Date: Tue, 26 Dec 2023 15:10:40 +0800 Subject: [PATCH 27/35] Add missing lightning dep --- Dockerfile | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index e1378f66..9951dc95 100644 --- a/Dockerfile +++ b/Dockerfile @@ -7,7 +7,8 @@ RUN apt-get update RUN apt-get install git -y RUN pip3 install --upgrade pip && \ - pip3 install poetry + pip3 install poetry && \ + pip3 install lightning RUN conda init bash \ && . ~/.bashrc \ @@ -16,4 +17,4 @@ RUN conda init bash \ && poetry install --with dev --no-interaction --no-ansi RUN apt-get install curl -y && curl -sSL https://sdk.cloud.google.com | bash -ENV PATH $PATH:/root/google-cloud-sdk/bin \ No newline at end of file +ENV PATH $PATH:/root/google-cloud-sdk/bin From a1d79c158cfc927cca4a7319e54609e50e64de84 Mon Sep 17 00:00:00 2001 From: Evening Date: Tue, 26 Dec 2023 15:10:53 +0800 Subject: [PATCH 28/35] Add uncommentable local W&B setup --- tests/model_tests/chestnut_dec_may/train.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/model_tests/chestnut_dec_may/train.py b/tests/model_tests/chestnut_dec_may/train.py index 9c2b3c96..f31ee825 100644 --- a/tests/model_tests/chestnut_dec_may/train.py +++ b/tests/model_tests/chestnut_dec_may/train.py @@ -4,7 +4,10 @@ the 20210510 dataset. """ -import os +# Uncomment this to run the W&B monitoring locally +# import os +# os.environ["WANDB_MODE"] = "offline" + from pathlib import Path import lightning as pl From 5d457abd3c17cf7d1884b3d87bd7b891741b7f5c Mon Sep 17 00:00:00 2001 From: Evening Date: Tue, 26 Dec 2023 15:46:13 +0800 Subject: [PATCH 29/35] Update getting started docs for dev container --- Writerside/topics/Getting-Started.md | 125 ++++++++++++++++++----- Writerside/writerside.cfg | 2 +- docs/HelpTOC.json | 2 +- docs/custom-k-aug-dataloaders.html | 6 +- docs/getting-started.html | 24 +++-- docs/icon-192.png | Bin 0 -> 337 bytes docs/icon-512.png | Bin 0 -> 1103 bytes docs/load-dataset.html | 8 +- docs/load-gcs.html | 6 +- docs/mix-match-module.html | 12 +-- docs/mix-match.html | 2 +- docs/model-test-chestnut-may-dec.html | 2 +- docs/overview.html | 2 +- docs/preprocessing-extract-segments.html | 16 +-- docs/preprocessing-glcm-padded.html | 4 +- docs/preprocessing-morphology.html | 6 +- docs/preprocessing-scale.html | 4 +- docs/retrieve-our-datasets.html | 10 +- docs/site.webmanifest | 11 ++ docs/train-frdc-lightning.html | 4 +- 20 files changed, 167 insertions(+), 79 deletions(-) create mode 100644 docs/icon-192.png create mode 100644 docs/icon-512.png create mode 100644 docs/site.webmanifest diff --git a/Writerside/topics/Getting-Started.md b/Writerside/topics/Getting-Started.md index 7615174d..10fe4398 100644 --- a/Writerside/topics/Getting-Started.md +++ b/Writerside/topics/Getting-Started.md @@ -10,7 +10,7 @@ Start by cloning our repository. 
- git clone https://github.com/Forest-Recovery-Digital-Companion/FRDC-ML.git + git clone https://github.com/FR-DC/FRDC-ML.git Then, create a Python Virtual Env pyvenv @@ -60,6 +60,26 @@ + + + Only use Dev. Containers if you're familiar with your IDEs, it's highly + dependent on clicking around the IDE. + + Do not set up a new environment, it'll be included in the environment. + + Ensure that you have installed pre-requisites for respective IDEs. + VSCode + IntelliJ + + Start by cloning our repository. + + git clone https://github.com/FR-DC/FRDC-ML.git + + + Follow steps for respective IDEs to set up the Dev. Container. + Activate the virtual environment. The venv is located in /opt/venv + + We use Google Cloud to store our datasets. To set up Google Cloud, @@ -86,6 +106,49 @@ + + This is only necessary if any task requires Label Studio annotations + + We use Label Studio to annotate our datasets. + We won't go through how to install Label Studio, for contributors, it + should be up on localhost:8080. + + + Then, retrieve your own API key from Label Studio. + Go to your account page + and copy the API key.
+ Set your API key as an environment variable. + + + In Windows, go to "Edit environment variables for + your account" and add this as a new environment variable with name + LABEL_STUDIO_API_KEY. + + + Export it as an environment variable. + export LABEL_STUDIO_API_KEY=... + + + +
+ + + + + We use W&B to track our experiments. To set up W&B, + + install the W&B CLI + + + + Then, + + authenticate your account + . + wandb login + + + This is optional but recommended. Pre-commit hooks are a way to ensure that your code is formatted correctly. @@ -98,30 +161,45 @@ - + Run the tests to make sure everything is working pytest - - In case of errors: - - - If you get this error, it means that you haven't authenticated your - Google Cloud account. - See Setting Up Google Cloud - - - If you get this error, it means that you haven't installed the - dependencies. - See Installing the Dev. Environment - - - +## Troubleshooting + +### ModuleNotFoundError + +It's likely that your `src` and `tests` directories are not in `PYTHONPATH`. +To fix this, run the following command: + +```shell +export PYTHONPATH=$PYTHONPATH:./src:./tests +``` + +Or, set it in your IDE, for example, IntelliJ allows setting directories as +**Source Roots**. + +### google.auth.exceptions.DefaultCredentialsError + +It's likely that you haven't authenticated your Google Cloud account. +See [Setting Up Google Cloud](#gcloud) + +### Couldn't connect to Label Studio + +Label Studio must be running locally, exposed on `localhost:8080`. Furthermore, +you need to specify the `LABEL_STUDIO_API_KEY` environment variable. See +[Setting Up Label Studio](#ls) + +### Cannot login to W&B + +You need to authenticate your W&B account. See [Setting Up Weight and Biases](#wandb) +If you're facing difficulties, set the `WANDB_MODE` environment variable to `offline` +to disable W&B. ## Our Repository Structure @@ -132,7 +210,6 @@ help you understand where to put your code. graph LR FRDC -- " Core Dependencies " --> src/frdc/ FRDC -- " Resources " --> rsc/ - FRDC -- " Pipeline " --> pipeline/ FRDC -- " Tests " --> tests/ FRDC -- " Repo Dependencies " --> pyproject.toml,poetry.lock src/frdc/ -- " Dataset Loaders " --> ./load/ @@ -140,7 +217,6 @@ graph LR src/frdc/ -- " Train Deps " --> ./train/ src/frdc/ -- " Model Architectures " --> ./models/ rsc/ -- " Datasets ... " --> ./dataset_name/ - pipeline/ -- " Model Training Pipeline " --> ./model_tests/ ``` src/frdc/ @@ -149,11 +225,8 @@ src/frdc/ rsc/ : Resources. These are usually cached datasets -pipeline/ -: Pipeline code. These are the full ML tests of our pipeline. - tests/ -: PyTest tests. These are unit tests & integration tests. +: PyTest tests. These are unit, integration, and model tests. ### Unit, Integration, and Pipeline Tests @@ -161,7 +234,7 @@ We have 3 types of tests: - Unit Tests are usually small, single function tests. - Integration Tests are larger tests that tests a mock pipeline. -- Pipeline Tests are the true production pipeline tests that will generate a +- Model Tests are the true production pipeline tests that will generate a model. ### Where Should I contribute? @@ -176,9 +249,9 @@ at the src/frdc/ directory. By adding a new component, you'll need to add a new test. Take a look at the tests/ directory. - + If you're a ML Researcher, you'll probably be changing the pipeline. Take a -look at the pipeline/ directory. +look at the tests/model_tests/ directory. 
If you're adding a new dependency, use poetry add PACKAGE and diff --git a/Writerside/writerside.cfg b/Writerside/writerside.cfg index 39e81f21..9e1b0444 100644 --- a/Writerside/writerside.cfg +++ b/Writerside/writerside.cfg @@ -4,5 +4,5 @@ - + \ No newline at end of file diff --git a/docs/HelpTOC.json b/docs/HelpTOC.json index 54d3f877..107c112d 100644 --- a/docs/HelpTOC.json +++ b/docs/HelpTOC.json @@ -1 +1 @@ -{"entities":{"pages":{"Overview":{"id":"Overview","title":"Overview","url":"overview.html","level":0,"tabIndex":0},"Getting-Started":{"id":"Getting-Started","title":"Getting Started","url":"getting-started.html","level":0,"tabIndex":1},"ae6f1f90_3454":{"id":"ae6f1f90_3454","title":"Tutorials","level":0,"pages":["Retrieve-our-Datasets"],"tabIndex":2},"Retrieve-our-Datasets":{"id":"Retrieve-our-Datasets","title":"Retrieve our Datasets","url":"retrieve-our-datasets.html","level":1,"parentId":"ae6f1f90_3454","tabIndex":0},"mix-match":{"id":"mix-match","title":"MixMatch","url":"mix-match.html","level":0,"pages":["mix-match-module","custom-k-aug-dataloaders"],"tabIndex":3},"mix-match-module":{"id":"mix-match-module","title":"MixMatch Module","url":"mix-match-module.html","level":1,"parentId":"mix-match","tabIndex":0},"custom-k-aug-dataloaders":{"id":"custom-k-aug-dataloaders","title":"Custom K-Aug Dataloaders","url":"custom-k-aug-dataloaders.html","level":1,"parentId":"mix-match","tabIndex":1},"ae6f1f90_3459":{"id":"ae6f1f90_3459","title":"Model Tests","level":0,"pages":["Model-Test-Chestnut-May-Dec"],"tabIndex":4},"Model-Test-Chestnut-May-Dec":{"id":"Model-Test-Chestnut-May-Dec","title":"Model Test Chestnut May-Dec","url":"model-test-chestnut-may-dec.html","level":1,"parentId":"ae6f1f90_3459","tabIndex":0},"ae6f1f90_3461":{"id":"ae6f1f90_3461","title":"API","level":0,"pages":["load.dataset","load.gcs","preprocessing.scale","preprocessing.extract_segments","preprocessing.morphology","preprocessing.glcm_padded","train.frdc_lightning"],"tabIndex":5},"load.dataset":{"id":"load.dataset","title":"load.dataset","url":"load-dataset.html","level":1,"parentId":"ae6f1f90_3461","tabIndex":0},"load.gcs":{"id":"load.gcs","title":"load.gcs","url":"load-gcs.html","level":1,"parentId":"ae6f1f90_3461","tabIndex":1},"preprocessing.scale":{"id":"preprocessing.scale","title":"preprocessing.scale","url":"preprocessing-scale.html","level":1,"parentId":"ae6f1f90_3461","tabIndex":2},"preprocessing.extract_segments":{"id":"preprocessing.extract_segments","title":"preprocessing.extract_segments","url":"preprocessing-extract-segments.html","level":1,"parentId":"ae6f1f90_3461","tabIndex":3},"preprocessing.morphology":{"id":"preprocessing.morphology","title":"preprocessing.morphology","url":"preprocessing-morphology.html","level":1,"parentId":"ae6f1f90_3461","tabIndex":4},"preprocessing.glcm_padded":{"id":"preprocessing.glcm_padded","title":"preprocessing.glcm_padded","url":"preprocessing-glcm-padded.html","level":1,"parentId":"ae6f1f90_3461","tabIndex":5},"train.frdc_lightning":{"id":"train.frdc_lightning","title":"train.frdc_datamodule \u0026 frdc_module","url":"train-frdc-lightning.html","level":1,"parentId":"ae6f1f90_3461","tabIndex":6}}},"topLevelIds":["Overview","Getting-Started","ae6f1f90_3454","mix-match","ae6f1f90_3459","ae6f1f90_3461"]} \ No newline at end of file +{"entities":{"pages":{"Overview":{"id":"Overview","title":"Overview","url":"overview.html","level":0,"tabIndex":0},"Getting-Started":{"id":"Getting-Started","title":"Getting 
Started","url":"getting-started.html","level":0,"tabIndex":1},"e8e19623_38829":{"id":"e8e19623_38829","title":"Tutorials","level":0,"pages":["Retrieve-our-Datasets"],"tabIndex":2},"Retrieve-our-Datasets":{"id":"Retrieve-our-Datasets","title":"Retrieve our Datasets","url":"retrieve-our-datasets.html","level":1,"parentId":"e8e19623_38829","tabIndex":0},"mix-match":{"id":"mix-match","title":"MixMatch","url":"mix-match.html","level":0,"pages":["mix-match-module","custom-k-aug-dataloaders"],"tabIndex":3},"mix-match-module":{"id":"mix-match-module","title":"MixMatch Module","url":"mix-match-module.html","level":1,"parentId":"mix-match","tabIndex":0},"custom-k-aug-dataloaders":{"id":"custom-k-aug-dataloaders","title":"Custom K-Aug Dataloaders","url":"custom-k-aug-dataloaders.html","level":1,"parentId":"mix-match","tabIndex":1},"e8e19623_38834":{"id":"e8e19623_38834","title":"Model Tests","level":0,"pages":["Model-Test-Chestnut-May-Dec"],"tabIndex":4},"Model-Test-Chestnut-May-Dec":{"id":"Model-Test-Chestnut-May-Dec","title":"Model Test Chestnut May-Dec","url":"model-test-chestnut-may-dec.html","level":1,"parentId":"e8e19623_38834","tabIndex":0},"e8e19623_38836":{"id":"e8e19623_38836","title":"API","level":0,"pages":["load.dataset","load.gcs","preprocessing.scale","preprocessing.extract_segments","preprocessing.morphology","preprocessing.glcm_padded","train.frdc_lightning"],"tabIndex":5},"load.dataset":{"id":"load.dataset","title":"load.dataset","url":"load-dataset.html","level":1,"parentId":"e8e19623_38836","tabIndex":0},"load.gcs":{"id":"load.gcs","title":"load.gcs","url":"load-gcs.html","level":1,"parentId":"e8e19623_38836","tabIndex":1},"preprocessing.scale":{"id":"preprocessing.scale","title":"preprocessing.scale","url":"preprocessing-scale.html","level":1,"parentId":"e8e19623_38836","tabIndex":2},"preprocessing.extract_segments":{"id":"preprocessing.extract_segments","title":"preprocessing.extract_segments","url":"preprocessing-extract-segments.html","level":1,"parentId":"e8e19623_38836","tabIndex":3},"preprocessing.morphology":{"id":"preprocessing.morphology","title":"preprocessing.morphology","url":"preprocessing-morphology.html","level":1,"parentId":"e8e19623_38836","tabIndex":4},"preprocessing.glcm_padded":{"id":"preprocessing.glcm_padded","title":"preprocessing.glcm_padded","url":"preprocessing-glcm-padded.html","level":1,"parentId":"e8e19623_38836","tabIndex":5},"train.frdc_lightning":{"id":"train.frdc_lightning","title":"train.frdc_datamodule \u0026 frdc_module","url":"train-frdc-lightning.html","level":1,"parentId":"e8e19623_38836","tabIndex":6}}},"topLevelIds":["Overview","Getting-Started","e8e19623_38829","mix-match","e8e19623_38834","e8e19623_38836"]} \ No newline at end of file diff --git a/docs/custom-k-aug-dataloaders.html b/docs/custom-k-aug-dataloaders.html index f3bc78b7..487648dc 100644 --- a/docs/custom-k-aug-dataloaders.html +++ b/docs/custom-k-aug-dataloaders.html @@ -1,4 +1,4 @@ - Custom K-Aug Dataloaders | Documentation

Documentation 0.0.7 Help

Custom K-Aug Dataloaders

In MixMatch, implementing the data loading methods is quite unconventional.

  1. We need to load multiple augmented versions of the same image into the same batch.

  2. The labelled set is usually too small, causing a premature end to the epoch as it runs out of samples to draw from faster than the unlabelled set.

This can be rather tricky to implement in PyTorch. This tutorial will illustrate how we did it.

Loading Multiple Augmented Versions of the Same Image

See: frdc/load/dataset.py FRDCDataset.__getitem__

In MixMatch, a single train batch must consist of:

  1. A batch of labeled images

  2. K batches of unlabeled images

Aug
Aug
Aug
Aug
Get Batch
Aug Labelled Batch
Unlabelled Batch
Aug Unl. Batch 1
Aug Unl. Batch i
Aug Unl. Batch K

Keep in mind that the unlabelled batch, is a single batch of images, not separate draws of batches. It is then "duplicated" K times, and each copy is augmented differently.

Solution 1: Custom Dataset

To solve this, we need to understand the role of both a Dataset and a DataLoader.

  • A Dataset represents a collection of data, responsible for loading and returning something.

  • A DataLoader draws samples from a Dataset and returns batched samples.

The key here is that a Dataset is not limited to returning 1 sample at a time, we can make it return the K augmented versions of the same image.

Aug
Aug
Aug
Sample
Aug Sample 1
Aug Sample i
Aug Sample K

In code, this is done by subclassing the Dataset class and overriding the __getitem__ method.

+ Custom K-Aug Dataloaders | Documentation

Documentation 0.0.7 Help

Custom K-Aug Dataloaders

In MixMatch, implementing the data loading methods is quite unconventional.

  1. We need to load multiple augmented versions of the same image into the same batch.

  2. The labelled set is usually too small, causing a premature end to the epoch as it runs out of samples to draw from faster than the unlabelled set.

This can be rather tricky to implement in PyTorch. This tutorial will illustrate how we did it.

Loading Multiple Augmented Versions of the Same Image

See: frdc/load/dataset.py FRDCDataset.__getitem__

In MixMatch, a single train batch must consist of:

  1. A batch of labeled images

  2. K batches of unlabeled images

Aug
Aug
Aug
Aug
Get Batch
Aug Labelled Batch
Unlabelled Batch
Aug Unl. Batch 1
Aug Unl. Batch i
Aug Unl. Batch K

Keep in mind that the unlabelled batch is a single batch of images, not separate draws of batches. It is then "duplicated" K times, and each copy is augmented differently.

Solution 1: Custom Dataset

To solve this, we need to understand the role of both a Dataset and a DataLoader.

  • A Dataset represents a collection of data, responsible for loading and returning something.

  • A DataLoader draws samples from a Dataset and returns batched samples.

The key here is that a Dataset is not limited to returning 1 sample at a time, we can make it return the K augmented versions of the same image.

Aug
Aug
Aug
Sample
Aug Sample 1
Aug Sample i
Aug Sample K

In code, this is done by subclassing the Dataset class and overriding the __getitem__ method.

def duplicate(x): return x, deepcopy(x), deepcopy(x) @@ -10,7 +10,7 @@ def __getitem__(self, index): x, y = self.dataset[index] return self.aug(x), y -

In the above example, we have a Dataset that returns 3 duplicate versions of the same image. By leveraging this technique, we can create a Dataset that returns K augmented versions of the same image as a tuple

Premature End of Epoch due to Small Labelled Set

See: frdc/train/frdc_datamodule.py

In MixMatch, the definition of an "epoch" is a bit different. Instead of implying that we have seen all the data once, it implies that we've drawn N batches. The N is referred to as the number of iterations per epoch.

Take for example, a labelled set of numbers [1, 2, 3] and an unlabelled set [4, 5, 6, 7, 8, 9, 10]. With batch size of 2, we'll run out of labelled samples after 2 iterations, but we'll still have 3 more iterations for the unlabelled set.

  • Draw 1: [1, 2], [4, 5]

  • Draw 2: [3], [6, 7].

  • Epoch ends.

Solution 2: Random Sampling

To fix this, instead of sequentially sampling the labelled set (and the unlabelled set), we can sample them randomly. This way, we can ensure that it never runs out.

  • Draw 1: [1, 3], [7, 5]

  • Draw 2: [2, 1], [4, 9]

  • Draw 3: [3, 2], [8, 6]

  • ... and so on.

Luckily, PyTorch's DataLoader supports random sampling. We just need to use RandomSampler instead of SequentialSampler (which is the default).

+

In the above example, we have a Dataset that returns 3 duplicate versions of the same image. By leveraging this technique, we can create a Dataset that returns K augmented versions of the same image as a tuple.

Premature End of Epoch due to Small Labelled Set

See: frdc/train/frdc_datamodule.py

In MixMatch, the definition of an "epoch" is a bit different. Instead of implying that we have seen all the data once, it implies that we've drawn N batches. The N is referred to as the number of iterations per epoch.

Take for example, a labelled set of numbers [1, 2, 3] and an unlabelled set [4, 5, 6, 7, 8, 9, 10]. With batch size of 2, we'll run out of labelled samples after 2 iterations, but we'll still have 3 more iterations for the unlabelled set.

  • Draw 1: [1, 2], [4, 5]

  • Draw 2: [3], [6, 7].

  • Epoch ends.

Solution 2: Random Sampling

To fix this, instead of sequentially sampling the labelled set (and the unlabelled set), we can sample them randomly. This way, we can ensure that it never runs out.

  • Draw 1: [1, 3], [7, 5]

  • Draw 2: [2, 1], [4, 9]

  • Draw 3: [3, 2], [8, 6]

  • ... and so on.

Luckily, PyTorch's DataLoader supports random sampling. We just need to use RandomSampler instead of SequentialSampler (which is the default).

from torch.utils.data import DataLoader, RandomSampler dl = DataLoader( @@ -21,4 +21,4 @@ replacement=False, ) ) -

This will ensure that the "epoch" ends when we've drawn train_iters batches

Last modified: 20 December 2023
\ No newline at end of file +

This will ensure that the "epoch" ends when we've drawn train_iters batches

Last modified: 26 December 2023
\ No newline at end of file diff --git a/docs/getting-started.html b/docs/getting-started.html index 172d307d..7cd27c91 100644 --- a/docs/getting-started.html +++ b/docs/getting-started.html @@ -1,20 +1,24 @@ - Getting Started | Documentation

Documentation 0.0.7 Help

Getting Started

Installing the Dev. Environment

  1. Ensure that you have the right version of Python. The required Python version can be seen in pyproject.toml

    + Getting Started | Documentation

    Documentation 0.0.7 Help

    Getting Started

    Installing the Dev. Environment

    1. Ensure that you have the right version of Python. The required Python version can be seen in pyproject.toml

      [tool.poetry.dependencies] python = "..." -
    2. Start by cloning our repository.

      - git clone https://github.com/Forest-Recovery-Digital-Companion/FRDC-ML.git -
    3. Then, create a Python Virtual Env pyvenv

      python -m venv venv/
      python3 -m venv venv/
    4. Install Poetry Then check if it's installed with

      poetry --version
    5. Activate the virtual environment

      +
    6. Start by cloning our repository.

      + git clone https://github.com/FR-DC/FRDC-ML.git +
    7. Then, create a Python Virtual Env pyvenv

      python -m venv venv/
      python3 -m venv venv/
    8. Install Poetry Then check if it's installed with

      poetry --version
    9. Activate the virtual environment

      cd venv/Scripts activate cd ../.. -
      +
      source venv/bin/activate -
    10. Install the dependencies. You should be in the same directory as pyproject.toml

      +
  2. Install the dependencies. You should be in the same directory as pyproject.toml

    poetry install --with dev -
  3. Install Pre-Commit Hooks

    +
  4. Install Pre-Commit Hooks

    pre-commit install -

Setting Up Google Cloud

  1. We use Google Cloud to store our datasets. To set up Google Cloud, install the Google Cloud CLI

  2. Then, authenticate your account.

    gcloud auth login
  3. Finally, set up Application Default Credentials (ADC).

    gcloud auth application-default login
  4. To make sure everything is working, run the tests.

Pre-commit Hooks

  • +

Use a Dev. Container

  1. Ensure that you have installed pre-requisites for respective IDEs. VSCode IntelliJ

  2. Start by cloning our repository.

    + git clone https://github.com/FR-DC/FRDC-ML.git +
  3. Follow steps for respective IDEs to set up the Dev. Container.

  4. Activate the virtual environment. The venv is located in /opt/venv

Setting Up Google Cloud

  1. We use Google Cloud to store our datasets. To set up Google Cloud, install the Google Cloud CLI

  2. Then, authenticate your account.

    gcloud auth login
  3. Finally, set up Application Default Credentials (ADC).

    gcloud auth application-default login
  4. To make sure everything is working, run the tests.

Setting Up Label Studio

  1. We use Label Studio to annotate our datasets. We won't go through how to install Label Studio, for contributors, it should be up on localhost:8080.

  2. Then, retrieve your own API key from Label Studio. Go to your account page and copy the API key.


  3. Set your API key as an environment variable.

    In Windows, go to "Edit environment variables for your account" and add this as a new environment variable with name LABEL_STUDIO_API_KEY.

    Export it as an environment variable.

    export LABEL_STUDIO_API_KEY=...

Setting Up Weight and Biases

  1. We use W&B to track our experiments. To set up W&B, install the W&B CLI

  2. Then, authenticate your account.

    wandb login

Pre-commit Hooks

  • pre-commit install -

Running the Tests

  1. Run the tests to make sure everything is working

    +

Running the Tests

  • Run the tests to make sure everything is working

    pytest -
  • In case of errors:

    google.auth.exceptions.DefaultCredentialsError

    If you get this error, it means that you haven't authenticated your Google Cloud account. See Setting Up Google Cloud

    ModuleNotFoundError

    If you get this error, it means that you haven't installed the dependencies. See Installing the Dev. Environment

Our Repository Structure

Before starting development, take a look at our repository structure. This will help you understand where to put your code.

Core Dependencies
Resources
Pipeline
Tests
Repo Dependencies
Dataset Loaders
Preprocessing Fn.
Train Deps
Model Architectures
Datasets ...
Model Training Pipeline
FRDC
src/frdc/
rsc/
pipeline/
tests/
pyproject.toml,poetry.lock
./load/
./preprocess/
./train/
./models/
./dataset_name/
./model_tests/
src/frdc/

Source Code for our package. These are the unit components of our pipeline.

rsc/

Resources. These are usually cached datasets

pipeline/

Pipeline code. These are the full ML tests of our pipeline.

tests/

PyTest tests. These are unit tests & integration tests.

Unit, Integration, and Pipeline Tests

We have 3 types of tests:

  • Unit Tests are usually small, single function tests.

  • Integration Tests are larger tests that tests a mock pipeline.

  • Pipeline Tests are the true production pipeline tests that will generate a model.

Where Should I contribute?

Changing a small component

If you're changing a small component, such as a argument for preprocessing, a new model architecture, or a new configuration for a dataset, take a look at the src/frdc/ directory.

Adding a test

By adding a new component, you'll need to add a new test. Take a look at the tests/ directory.

Changing the pipeline

If you're a ML Researcher, you'll probably be changing the pipeline. Take a look at the pipeline/ directory.

Adding a dependency

If you're adding a new dependency, use poetry add PACKAGE and commit the changes to pyproject.toml and poetry.lock.

Last modified: 20 December 2023
\ No newline at end of file +

Troubleshooting

ModuleNotFoundError

It's likely that your src and tests directories are not in PYTHONPATH. To fix this, run the following command:

+export PYTHONPATH=$PYTHONPATH:./src:./tests +

Or, set it in your IDE, for example, IntelliJ allows setting directories as Source Roots.

google.auth.exceptions.DefaultCredentialsError

It's likely that you haven't authenticated your Google Cloud account. See Setting Up Google Cloud

Couldn't connect to Label Studio

Label Studio must be running locally, exposed on localhost:8080. Furthermore, you need to specify the LABEL_STUDIO_API_KEY environment variable. See Setting Up Label Studio

Cannot login to W&B

You need to authenticate your W&B account. See Setting Up Weight and Biases. If you're facing difficulties, set the WANDB_MODE environment variable to offline to disable W&B.
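
For example, in a POSIX shell, before running the tests or training scripts:

export WANDB_MODE=offline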

Our Repository Structure

Before starting development, take a look at our repository structure. This will help you understand where to put your code.

Core Dependencies
Resources
Tests
Repo Dependencies
Dataset Loaders
Preprocessing Fn.
Train Deps
Model Architectures
Datasets ...
FRDC
src/frdc/
rsc/
tests/
pyproject.toml,poetry.lock
./load/
./preprocess/
./train/
./models/
./dataset_name/
src/frdc/

Source Code for our package. These are the unit components of our pipeline.

rsc/

Resources. These are usually cached datasets

tests/

PyTest tests. These are unit, integration, and model tests.

Unit, Integration, and Pipeline Tests

We have 3 types of tests:

  • Unit Tests are usually small, single function tests.

  • Integration Tests are larger tests that test a mock pipeline.

  • Model Tests are the true production pipeline tests that will generate a model.

Where Should I contribute?

Changing a small component

If you're changing a small component, such as an argument for preprocessing, a new model architecture, or a new configuration for a dataset, take a look at the src/frdc/ directory.

Adding a test

By adding a new component, you'll need to add a new test. Take a look at the tests/ directory.

Changing the model pipeline

If you're an ML Researcher, you'll probably be changing the pipeline. Take a look at the tests/model_tests/ directory.
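
The model tests are run as modules from the tests/ directory, the same way the CI workflow invokes them:

python3 -m model_tests.chestnut_dec_may.train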

Adding a dependency

If you're adding a new dependency, use poetry add PACKAGE and commit the changes to pyproject.toml and poetry.lock.

Last modified: 26 December 2023
\ No newline at end of file diff --git a/docs/icon-192.png b/docs/icon-192.png new file mode 100644 index 0000000000000000000000000000000000000000..5953601c396250504ba6b31c031ea906e92b6cd9 GIT binary patch literal 337 zcmeAS@N?(olHy`uVBq!ia0vp^2SAvE2}s`E_d9@rflSGwb;dxs*#b$G<8}erCCjeWuCzLfaEd zD*IkLs+}#4;Wx^h_m~)^%s}g@2u|;lzdBFn<%jhd{?kf+gl|#zw)&%eYqF~BKhd`* zC-MF7`j+C^Uhi7HY02UJi)gTe~DWM4f DYn5)J literal 0 HcmV?d00001 diff --git a/docs/icon-512.png b/docs/icon-512.png new file mode 100644 index 0000000000000000000000000000000000000000..9840e7b0cd4973a67d66ea20a62c77380047aed1 GIT binary patch literal 1103 zcmeAS@N?(olHy`uVBq!ia0y~yU;;9k7&t&wwUqN(1_l-}PZ!6KinzB|482$b1XvEf z|I5F&QoY|*z-`s1vsK?dmwsVZ>UO9HIT3v b7H)suWWQGE&SGE*!NB0@>gTe~DWM4f4BN6W literal 0 HcmV?d00001 diff --git a/docs/load-dataset.html b/docs/load-dataset.html index 09a97fb1..dc575cd2 100644 --- a/docs/load-dataset.html +++ b/docs/load-dataset.html @@ -1,14 +1,14 @@ - load.dataset | Documentation

Documentation 0.0.7 Help

load.dataset

Usage

Firstly, to load a dataset instance, you need to initiliaze a FRDCDataset object, providing the site, date, and version.

For example, to load our Chestnut Nature Park dataset.

+ load.dataset | Documentation

Documentation 0.0.7 Help

load.dataset

Usage

Firstly, to load a dataset instance, you need to initialize a FRDCDataset object, providing the site, date, and version.

For example, to load our Chestnut Nature Park dataset.

from frdc.load import FRDCDataset ds = FRDCDataset(site='chestnut_nature_park', date='20201218', version=None) -

Then, we can use the ds object to load objects of the dataset:

+

Then, we can use the ds object to load objects of the dataset:

ar, order = ds.get_ar_bands() d = ds.get_ar_bands_as_dict() bounds, labels = ds.get_bounds_and_labels() -
  • ar is a stacked NDArray of the hyperspectral bands of shape (H x W x C)

  • order is a list of strings, containing the names of the bands, ordered according to the channels of ar

  • d is a dictionary of the hyperspectral bands of shape (H x W), keyed by the band names

  • bounds is a list of bounding boxes, in the format of Rect, a namedtuple of x0, y0, x1, y1

  • labels is a list of strings, containing the labels of the bounding boxes, ordered according to bounds

Filters

You can also selectively get the channels for both get_ar_bands() and get_ar_bands_as_dict() by providing a list of strings to the bands argument.

For example, to get the Wideband RGB bands, you can do:

+
  • ar is a stacked NDArray of the hyperspectral bands of shape (H x W x C)

  • order is a list of strings, containing the names of the bands, ordered according to the channels of ar

  • d is a dictionary of the hyperspectral bands of shape (H x W), keyed by the band names

  • bounds is a list of bounding boxes, in the format of Rect, a namedtuple of x0, y0, x1, y1

  • labels is a list of strings, containing the labels of the bounding boxes, ordered according to bounds
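
As a small illustration of how these fit together (a sketch, assuming bounds and labels come from ds.get_bounds_and_labels() as above):

for (x0, y0, x1, y1), label in zip(bounds, labels):
    # Each Rect pairs with its label by position
    print(label, (x0, y0), (x1, y1))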

Filters

You can also selectively get the channels for both get_ar_bands() and get_ar_bands_as_dict() by providing a list of strings to the bands argument.

For example, to get the Wideband RGB bands, you can do:

ar, order = ds.get_ar_bands(bands=['WR', 'WG', 'WB']) d = ds.get_ar_bands_as_dict(bands=['WR', 'WG', 'WB']) -

This will also alter the channel order to the order of the bands provided.

See load.gcs for configuration options.

Last modified: 20 December 2023
\ No newline at end of file +

This will also alter the channel order to the order of the bands provided.

See load.gcs for configuration options.

Last modified: 26 December 2023
\ No newline at end of file diff --git a/docs/load-gcs.html b/docs/load-gcs.html index e135d6e1..f02e3959 100644 --- a/docs/load-gcs.html +++ b/docs/load-gcs.html @@ -1,11 +1,11 @@ - load.gcs | Documentation

Documentation 0.0.7 Help

load.gcs

Usage

These are defined in the top-level load.gcs module.

list_gcs_datasets

Lists all datasets in the bucket as a DataFrame. This works by checking which folders have a specific file, which we call the anchor.

download

Downloads a file from Google Cloud Storage and returns the local file path.

open_file

Downloads and opens a file from Google Cloud Storage. Returns a file handle.

open_image

Downloads and returns the PIL image from Google Cloud Storage.

Pathing

The path to specify is relative to the bucket, which is frdc-ds by default.

For example this filesystem on GCS:

+ load.gcs | Documentation

Documentation 0.0.7 Help

load.gcs

Usage

These are defined in the top-level load.gcs module.

list_gcs_datasets

Lists all datasets in the bucket as a DataFrame. This works by checking which folders have a specific file, which we call the anchor.

download

Downloads a file from Google Cloud Storage and returns the local file path.

open_file

Downloads and opens a file from Google Cloud Storage. Returns a file handle.

open_image

Downloads and returns the PIL image from Google Cloud Storage.

Pathing

The path to specify is relative to the bucket, which is frdc-ds by default.

For example this filesystem on GCS:

# On Google Cloud Storage frdc-ds ├── chestnut_nature_park │ └── 20201218 │ └── 90deg │ └── bounds.json -

To download bounds.json, use download(r"chestnut_nature_park/20201218/90deg/bounds.json"). By default, all files will be downloaded to PROJ_DIR/rsc/....

+

To download bounds.json, use download(r"chestnut_nature_park/20201218/90deg/bounds.json"). By default, all files will be downloaded to PROJ_DIR/rsc/....

# On local filesystem PROJ_DIR ├── rsc @@ -13,4 +13,4 @@ │ └── 20201218 │ └── 90deg │ └── bounds.json -
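
A minimal usage sketch, assuming the helpers are importable as frdc.load.gcs:

from frdc.load.gcs import download

# Path is relative to the frdc-ds bucket; the file is cached under PROJ_DIR/rsc/
local_path = download(r"chestnut_nature_park/20201218/90deg/bounds.json")
print(local_path)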

Configuration

If you need granular control over

  • where the files are downloaded

  • the credentials used

  • the project used

  • the bucket used

Then edit conf.py.

GCS_CREDENTIALS

Google Cloud credentials.


A google.oauth2.service_account.Credentials object. See the object documentation for more information.

LOCAL_DATASET_ROOT_DIR

Local directory to download files to.


Path to a directory, or a Path object.

GCS_PROJECT_ID

Google Cloud project ID.


GCS_BUCKET_NAME

Google Cloud Storage bucket name.


Last modified: 20 December 2023
\ No newline at end of file +

Configuration

If you need granular control over

  • where the files are downloaded

  • the credentials used

  • the project used

  • the bucket used

Then edit conf.py.

GCS_CREDENTIALS

Google Cloud credentials.


A google.oauth2.service_account.Credentials object. See the object documentation for more information.

LOCAL_DATASET_ROOT_DIR

Local directory to download files to.


Path to a directory, or a Path object.

GCS_PROJECT_ID

Google Cloud project ID.


GCS_BUCKET_NAME

Google Cloud Storage bucket name.


Last modified: 26 December 2023
\ No newline at end of file diff --git a/docs/mix-match-module.html b/docs/mix-match-module.html index f622c5c5..e8d5df2e 100644 --- a/docs/mix-match-module.html +++ b/docs/mix-match-module.html @@ -1,4 +1,4 @@ - MixMatch Module | Documentation

Documentation 0.0.7 Help

MixMatch Module

See frdc/train/mixmatch_module.py.

Quick Recap

We will go over the essential parts of the code here. Before that, we revise some of the concepts that are used in the code.

Abstract Methods

In Python, we can define abstract methods using the abc module. Just like other OOP languages, abstract methods are methods that must be implemented by the child class.

For example:

+ MixMatch Module | Documentation

Documentation 0.0.7 Help

MixMatch Module

See frdc/train/mixmatch_module.py.

Quick Recap

We will go over the essential parts of the code here. Before that, we revise some of the concepts that are used in the code.

Abstract Methods

In Python, we can define abstract methods using the abc module. Just like other OOP languages, abstract methods are methods that must be implemented by the child class.

For example:

from abc import ABC, abstractmethod


class MyAbstractClass(ABC):
    # (the parent class is not shown in this diff hunk; a minimal version)
    @abstractmethod
    def my_abstract_method(self):
        ...


class MyChildClass(MyAbstractClass):
    def my_abstract_method(self):
        print("Hello World!")

nn.Module & LightningModule

If you're unfamiliar with PyTorch, you should read the nn.Module Documentation.

nn.Module is the base class for all neural network modules in PyTorch. While LightningModule is a PyTorch Lightning class that extends nn.Module, providing it with additional functionality that reduces boilerplate code.

By implementing it as a LightningModule, we also enter the PyTorch Lightning ecosystem, which provides us with a lot of useful features such as logging, early stopping, and more.

What do we implement in a Module?

One key component that nn.Module requires, is the model. So for example:

+

nn.Module & LightningModule

If you're unfamiliar with PyTorch, you should read the nn.Module Documentation.

nn.Module is the base class for all neural network modules in PyTorch, while LightningModule is a PyTorch Lightning class that extends nn.Module, providing additional functionality that reduces boilerplate code.

By implementing it as a LightningModule, we also enter the PyTorch Lightning ecosystem, which provides us with a lot of useful features such as logging, early stopping, and more.

What do we implement in a Module?

One key component that nn.Module requires is the model. For example:

class MyModule(nn.Module):
    def __init__(self):
        super().__init__()
        # (the model definition is not shown in this diff hunk;
        #  e.g. an nn.Sequential assigned to self.model)
        self.model = ...

    def forward(self, x):
        return self.model(x)

PyTorch Lightning builds on top of it, requiring training_step and validation_step. Each "step" is a batch of data, and the model is trained on it. So for example:

+

PyTorch Lightning builds on top of it, requiring training_step and validation_step. Each "step" is a batch of data, and the model is trained on it. So for example:

class MyModule(LightningModule):
    def __init__(self):
        ...

    def training_step(self, batch, batch_idx):
        # (the start of this method is not shown in the hunk; a typical body)
        x, y = batch
        y_hat = self(x)
        loss = F.cross_entropy(y_hat, y)
        return loss

    def validation_step(self, batch, batch_idx):
        x, y = batch
        y_hat = self(x)
        loss = F.cross_entropy(y_hat, y)
        return loss

Usually, the training and validation steps are the same, but in some cases, such as MixMatch, they are different. In MixMatch, we not only use a different loss function for train, we also handle a batch differently. The PyTorch Lightning framework allows us to separate the two, and implement them separately.

Model Embedded Preprocessing on_before_batch_transfer

In PyTorch Lightning, we can also inject a step before the batch is passed to the model. This is done by overriding the on_before_batch_transfer method.

Batch
on_before_batch_transfer
training_step
validation_step

This allows us to do preprocessing on the batch, such as scaling the data, encoding the labels, and more.

Custom EMA Update on_after_backward

We also leverage another hook, called on_after_backward. This hook is called after the backward pass, and allows us to do custom operations. In our case, we use it to update the EMA model.

Batch
training_step
on_after_backward
update_ema

MixMatch

We recommend having tests/model_tests/chestnut_dec_may/train.py open while reading this section. It implements a real-world example of MixMatch.

As a summary:

  1. We learned what is an abstract method, and how to implement it

  2. We implement the model in LightningModule much like we would in nn.Module

  3. We implement on_before_batch_transfer to preprocess the batch

  4. Finally, we implement on_after_backward to update the EMA model

With the above in mind, let's look at the MixMatch implementation.

forward (abstract)

Forward pass of the model

ema_model (abstract)

The model that is used for EMA. We expect this property to be implemented by the child class.

update_ema (abstract)

The method to update the EMA model. We expect this method to be implemented by the child class.

loss_unl_scaler (static)

Takes in the current progress of the training, 0.0 to 1.0, where 0.0 is the start of the training, and 1.0 is the end. Then, returns the multiplier for the unlabeled loss.

loss_lbl (static)

Implements the loss for labeled data. Takes in the predicted labels and the ground truth labels, and returns the loss. This is cross entropy for MixMatch.

loss_unl (static)

Implements the loss for unlabeled data. Takes in the predicted labels and the ground truth labels, and returns the loss. This is MSE for MixMatch.

mixup

Takes in the data and the labels, the beta distribution parameter, and returns the mixed data and labels.

sharpen

Takes in the labels and temperature, and returns the sharpened labels.

guess_labels

Takes in the unlabeled data, and returns the guessed labels.

progress

The current progress of the training, 0.0 to 1.0, where 0.0 is the start of the training, and 1.0 is the end.

training_step

The training step runs through 1 batch of data, and returns the loss. Note that this is significantly different from validation step, as we handle the K-Augmented data differently.

test / validation_step

The test / validation step runs through 1 batch of data, and returns the loss.

predict_step

The predict step runs through 1 batch of data, and returns the actual decoded labels.

on_after_backward

The on_after_backward hook is called after the backward pass, and allows us to do custom operations. In our case, we use it to update the EMA model.

on_before_batch_transfer

The on_before_batch_transfer hook is called before the batch is transferred to the GPU. In our case, we use it to preprocess the batch.

A diagram of how these components interact with each other is shown below:

Batch
on_before_batch_transfer
training_step
guess_labels
sharpen
mix_up
loss_unl
loss_unl_scaler
loss
loss_lbl
backward
on_after_backward
update_ema
validation_step
loss

Finally, we show an example of how to use the MixMatch module:

+

Usually, the training and validation steps are the same, but in some cases, such as MixMatch, they are different. In MixMatch, we not only use a different loss function for train, we also handle a batch differently. The PyTorch Lightning framework allows us to separate the two, and implement them separately.

Model Embedded Preprocessing on_before_batch_transfer

In PyTorch Lightning, we can also inject a step before the batch is passed to the model. This is done by overriding the on_before_batch_transfer method.

Batch
on_before_batch_transfer
training_step
validation_step

This allows us to do preprocessing on the batch, such as scaling the data, encoding the labels, and more.

Custom EMA Update on_after_backward

We also leverage another hook, called on_after_backward. This hook is called after the backward pass, and allows us to do custom operations. In our case, we use it to update the EMA model.

Batch
training_step
on_after_backward
update_ema
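As a rough sketch (not the repository's actual code), wiring these two hooks into a LightningModule looks like this; the preprocess, encode and update_ema helpers are stand-ins:

import lightning as pl

class SketchModule(pl.LightningModule):
    # A stand-in module: only the two hooks are shown.

    def on_before_batch_transfer(self, batch, dataloader_idx):
        # Runs before the batch is moved to the device; preprocess here,
        # e.g. scale the inputs and encode the labels.
        x, y = batch
        return self.preprocess(x), self.encode(y)  # stand-in helpers

    def on_after_backward(self):
        # Runs after every backward pass; update the EMA model here.
        self.update_ema()  # stand-in helper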

MixMatch

We recommend having tests/model_tests/chestnut_dec_may/train.py open while reading this section. It implements a real-world example of MixMatch.

As a summary:

  1. We learned what is an abstract method, and how to implement it

  2. We implement the model in LightningModule much like we would in nn.Module

  3. We implement on_before_batch_transfer to preprocess the batch

  4. Finally, we implement on_after_backward to update the EMA model

With the above in mind, let's look at the MixMatch implementation.

forward (abstract)

Forward pass of the model

ema_model (abstract)

The model that is used for EMA. We expect this property to be implemented by the child class.

update_ema (abstract)

The method to update the EMA model. We expect this method to be implemented by the child class.

loss_unl_scaler (static)

Takes in the current progress of the training, 0.0 to 1.0, where 0.0 is the start of the training, and 1.0 is the end. Then, returns the multiplier for the unlabeled loss.

loss_lbl (static)

Implements the loss for labeled data. Takes in the predicted labels and the ground truth labels, and returns the loss. This is cross entropy for MixMatch.

loss_unl (static)

Implements the loss for unlabeled data. Takes in the predicted labels and the ground truth labels, and returns the loss. This is MSE for MixMatch.

mixup

Takes in the data and the labels, the beta distribution parameter, and returns the mixed data and labels.

sharpen

Takes in the labels and temperature, and returns the sharpened labels.

guess_labels

Takes in the unlabeled data, and returns the guessed labels.

progress

The current progress of the training, 0.0 to 1.0, where 0.0 is the start of the training, and 1.0 is the end.

training_step

The training step runs through 1 batch of data, and returns the loss. Note that this is significantly different from validation step, as we handle the K-Augmented data differently.

test / validation_step

The test / validation step runs through 1 batch of data, and returns the loss.

predict_step

The predict step runs through 1 batch of data, and returns the actual decoded labels.

on_after_backward

The on_after_backward hook is called after the backward pass, and allows us to do custom operations. In our case, we use it to update the EMA model.

on_before_batch_transfer

The on_before_batch_transfer hook is called before the batch is transferred to the GPU. In our case, we use it to preprocess the batch.
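To make the sharpen and mixup steps above concrete, here is a rough sketch of the underlying math; it is a simplification, not the repository's exact implementation:

import torch

def sharpen(y_pred: torch.Tensor, temp: float) -> torch.Tensor:
    # Raise the predicted distribution to 1/temp and renormalize,
    # pushing it towards a one-hot guess as temp -> 0.
    y_sharp = y_pred ** (1 / temp)
    return y_sharp / y_sharp.sum(dim=1, keepdim=True)

def mixup(x: torch.Tensor, y: torch.Tensor, alpha: float):
    # Sample a mixing ratio from Beta(alpha, alpha) and blend each
    # sample with a shuffled partner, labels included.
    ratio = torch.distributions.Beta(alpha, alpha).sample()
    ratio = max(ratio, 1 - ratio)  # keep the original sample dominant
    perm = torch.randperm(x.shape[0])
    x_mix = ratio * x + (1 - ratio) * x[perm]
    y_mix = ratio * y + (1 - ratio) * y[perm]
    return x_mix, y_mix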

A diagram of how these components interact with each other is shown below:

Batch
on_before_batch_transfer
training_step
guess_labels
sharpen
mix_up
loss_unl
loss_unl_scaler
loss
loss_lbl
backward
on_after_backward
update_ema
validation_step
loss

Finally, we show an example of how to use the MixMatch module:

from sklearn.preprocessing import StandardScaler, OrdinalEncoder

from frdc.train.mixmatch_module import MixMatchModule

# The middle of this example is not shown in the diff hunk. It fits a
# StandardScaler on the training data and an OrdinalEncoder on the labels,
# then passes both into the module together with the arguments below:
MixMatchModule(
    x_scaler=ss,            # assumed keyword names; see mixmatch_module.py
    y_encoder=oe,
    n_classes=n_classes,
    sharpen_temp=0.5,
    mix_beta_alpha=0.75,
)

In particular, we need to supply some transformations for the preprocessing step. In this case, we use StandardScaler to scale the data, and OrdinalEncoder to encode the labels.

  1. It's best if standardization is done only on the training data, and not the validation data to better fit real-world scenarios.

  2. We use OrdinalEncoder as it handles unseen labels. So if a class doesn't show up in the training data, it will be encoded as np.nan, and will not participate in the loss calculation.

Design Choices

Static Method Overriding

We implement many functions as static, as we believe that a functional style reduces dependencies, thus making the code easier to test and debug.

Furthermore, it allows the subclasses to easily override the functions, to customize the behavior of the MixMatch module.

For example, the loss_unl_scaler function is static, thus, we can implement our own scaling function, and pass it to the MixMatch module.

+

In particular, we need to supply some transformations for the preprocessing step. In this case, we use StandardScaler to scale the data, and OrdinalEncoder to encode the labels.

  1. It's best if standardization is done only on the training data, and not the validation data to better fit real-world scenarios.

  2. We use OrdinalEncoder as it handles unseen labels. So if a class doesn't show up in the training data, it will be encoded as np.nan, and will not participate in the loss calculation.
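A minimal sketch of that preprocessing setup; X_train and y_train are placeholder names, and the handle_unknown arguments are the standard scikit-learn ones rather than values taken from the repository:

import numpy as np
from sklearn.preprocessing import StandardScaler, OrdinalEncoder

x_scaler = StandardScaler()
x_scaler.fit(X_train.reshape(-1, X_train.shape[-1]))  # fit on training data only

y_encoder = OrdinalEncoder(
    handle_unknown="use_encoded_value",  # unseen labels become...
    unknown_value=np.nan,                # ...np.nan and are excluded from the loss
)
y_encoder.fit(np.array(y_train).reshape(-1, 1))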

Design Choices

Static Method Overriding

We implement many functions as static, as we believe that a functional style reduces dependencies, thus making the code easier to test and debug.

Furthermore, it allows the subclasses to easily override the functions, to customize the behavior of the MixMatch module.

For example, the loss_unl_scaler function is static, thus, we can implement our own scaling function, and pass it to the MixMatch module.

def my_loss_unl_scaler(progress: float) -> float:
    return progress ** 2


class MyMixMatchModule(MixMatchModule):  # subclass name assumed for the example
    ...

    @staticmethod
    def loss_unl_scaler(progress: float) -> float:
        return my_loss_unl_scaler(progress)

If we had used a method instead, we would have to consider instance state, which would make it harder to override.

Why not use Dataclasses?

One of the biggest caveats of nn.Module is that it requires super().__init__() to be called before anything is assigned. While dataclass can leverage __post_init__ to do the same, we felt that this was too much of a hassle to save a few keystrokes. Thus, we opted to use __init__ instead, while more verbose, it is more explicit.

Why use PyTorch Lightning?

While we did hit some road blocks implementing SSL, due to its complex and unconventional nature, we felt that the benefits of using PyTorch Lightning outweighed the cons.

on_before_batch_transfer and on_after_backward are unconventional hooks, and we had to do some digging to find them. It can be argued that by just writing explicit code, we can avoid the need for these hooks, but the PyTorch ecosystem fixes many other issues, so we closed an eye on this.

References

Last modified: 20 December 2023
\ No newline at end of file +

If we had used a method instead, we would have to consider instance state, which would make it harder to override.

Why not use Dataclasses?

One of the biggest caveats of nn.Module is that it requires super().__init__() to be called before anything is assigned. While dataclass can leverage __post_init__ to do the same, we felt that this was too much of a hassle to save a few keystrokes. Thus, we opted to use __init__ instead; while more verbose, it is more explicit.

Why use PyTorch Lightning?

While we did hit some roadblocks implementing SSL, due to its complex and unconventional nature, we felt that the benefits of using PyTorch Lightning outweighed the cons.

on_before_batch_transfer and on_after_backward are unconventional hooks, and we had to do some digging to find them. It can be argued that by just writing explicit code we could avoid the need for these hooks, but the PyTorch ecosystem fixes many other issues, so we let this one slide.

References

Last modified: 26 December 2023
\ No newline at end of file diff --git a/docs/mix-match.html b/docs/mix-match.html index dfe195be..a1dd4670 100644 --- a/docs/mix-match.html +++ b/docs/mix-match.html @@ -1 +1 @@ - MixMatch | Documentation

Documentation 0.0.7 Help

MixMatch

In FRDC-ML, we leverage semi-supervised learning to improve the model's performance through better augmentation consistency and using even unlabelled data.

The algorithm we use is MixMatch. A state-of-the-art semi-supervised learning algorithm. It is based on the idea of consistency regularization, which encourages models to predict the same class even after augmentations that occur naturally in the real world.

Our implementation of MixMatch is a refactored version of YU1ut/MixMatch-pytorch We've refactored the code to follow more modern PyTorch practices, allowing us to utilize it with modern PyTorch frameworks such as PyTorch Lightning.

We won't go through the details of MixMatch here, see Our Documentation in our MixMatch-PyTorch-CIFAR10 repository for more details.

Implementation Details

  1. How we implemented the MixMatch logic MixMatchModule

  2. How we implemented the unique MixMatch data loading logic Custom MixMatch Data Loading

References

Last modified: 20 December 2023
\ No newline at end of file + MixMatch | Documentation

Documentation 0.0.7 Help

MixMatch

In FRDC-ML, we leverage semi-supervised learning to improve the model's performance through better augmentation consistency and by making use of unlabelled data.

The algorithm we use is MixMatch, a state-of-the-art semi-supervised learning algorithm. It is based on the idea of consistency regularization, which encourages models to predict the same class even after augmentations that occur naturally in the real world.

Our implementation of MixMatch is a refactored version of YU1ut/MixMatch-pytorch. We've refactored the code to follow more modern PyTorch practices, allowing us to use it with modern PyTorch frameworks such as PyTorch Lightning.

We won't go through the details of MixMatch here, see Our Documentation in our MixMatch-PyTorch-CIFAR10 repository for more details.

Implementation Details

  1. How we implemented the MixMatch logic MixMatchModule

  2. How we implemented the unique MixMatch data loading logic Custom MixMatch Data Loading

References

Last modified: 26 December 2023
\ No newline at end of file diff --git a/docs/model-test-chestnut-may-dec.html b/docs/model-test-chestnut-may-dec.html index fc6ce98e..57959a5f 100644 --- a/docs/model-test-chestnut-may-dec.html +++ b/docs/model-test-chestnut-may-dec.html @@ -1 +1 @@ - Model Test Chestnut May-Dec | Documentation

Documentation 0.0.7 Help

Model Test Chestnut May-Dec

This test is used to evaluate the model performance on the Chestnut Nature Park May & December dataset.

See this script in model_tests/chestnut_dec_may/train.py.

Motivation

The usage of this model will be to classify trees in unseen datasets under different conditions. In this test, we'll evaluate it under a different season.

A caveat is that it'll be evaluated on the same set of trees, so it's not a representative of a field-test. However, given difficulties of yielding datasets, this still gives us a good preliminary idea of how the model will perform in different conditions.

Methodology

We train on the December dataset, and test on the May dataset.

Labelled Train
Unlabelled Train
Test
DecDataset
Model
MayDataset

Despite not having any true unlabelled data, we use MixMatch by treating the labelled data of the December dataset as unlabelled data.

Model

The current Model used is a simple InceptionV3 Transfer Learning model, with the last layer replaced with a fully connected layer(s).

SSL Loss
Input
InceptionV3 Frozen
FC Layer(s)
Softmax
Output

Preprocessing

For Training:

Segment
RandomCrop 299
Horizontal Flip 50%
Vertical Flip 50%
Normalize By Training Mean & Std

For Validation:

Segment
CenterCrop 299
Normalize By Training Mean & Std

For Evaluation:

Segment
CenterCrop 299
Normalize By Training Mean & Std
As Is
Horizontal Flip
Vertical Flip
Horizontal & Vertical Flip

For evaluation, we evaluate that the model should be invariant to horizontal and vertical flips, as well as the original image.

Hyperparameters

The following hyperparameters are used:

  • Optimizer: Adam

  • Learning Rate: 1e-3

  • Batch Size: 32

  • Epochs: 10

  • Train Iterations: 25~100

  • Validation Iterations: 10~25

  • Early Stopping: 4

Results

We evaluate around 40% accuracy on the test set, compared to 100% for the training set. This indicates that the model has saturated and is not able to learn anymore from the training set. There's no indication of overfitting as the validation loss just plateaus.

W&B Dashboard

Caveats

  • The test set is very small, so the results are not very representative.

  • The test set is the same set of trees, so it's not a true test of the model performance in different conditions.

  • There are many classes with 1 sample, so the model may not be able to learn the features of these classes well.

Last modified: 20 December 2023
\ No newline at end of file + Model Test Chestnut May-Dec | Documentation

Documentation 0.0.7 Help

Model Test Chestnut May-Dec

This test is used to evaluate the model performance on the Chestnut Nature Park May & December dataset.

See this script in model_tests/chestnut_dec_may/train.py.

Motivation

The usage of this model will be to classify trees in unseen datasets under different conditions. In this test, we'll evaluate it under a different season.

A caveat is that it'll be evaluated on the same set of trees, so it's not representative of a field test. However, given the difficulty of obtaining datasets, this still gives us a good preliminary idea of how the model will perform in different conditions.

Methodology

We train on the December dataset, and test on the May dataset.

Labelled Train
Unlabelled Train
Test
DecDataset
Model
MayDataset

Despite not having any true unlabelled data, we use MixMatch by treating the labelled data of the December dataset as unlabelled data.

Model

The current Model used is a simple InceptionV3 Transfer Learning model, with the last layer replaced with a fully connected layer(s).

SSL Loss
Input
InceptionV3 Frozen
FC Layer(s)
Softmax
Output

Preprocessing

For Training:

Segment
RandomCrop 299
Horizontal Flip 50%
Vertical Flip 50%
Normalize By Training Mean & Std

For Validation:

Segment
CenterCrop 299
Normalize By Training Mean & Std

For Evaluation:

Segment
CenterCrop 299
Normalize By Training Mean & Std
As Is
Horizontal Flip
Vertical Flip
Horizontal & Vertical Flip

For evaluation, we check that the model's predictions are consistent across the original image and its horizontally, vertically, and jointly flipped versions.

Hyperparameters

The following hyperparameters are used:

  • Optimizer: Adam

  • Learning Rate: 1e-3

  • Batch Size: 32

  • Epochs: 10

  • Train Iterations: 25~100

  • Validation Iterations: 10~25

  • Early Stopping: 4

Results

We achieve around 40% accuracy on the test set, compared to 100% on the training set. This indicates that the model has saturated and is not able to learn any more from the training set. There's no indication of overfitting, as the validation loss simply plateaus.

W&B Dashboard

Caveats

  • The test set is very small, so the results are not very representative.

  • The test set is the same set of trees, so it's not a true test of the model performance in different conditions.

  • There are many classes with 1 sample, so the model may not be able to learn the features of these classes well.

Last modified: 26 December 2023
\ No newline at end of file diff --git a/docs/overview.html b/docs/overview.html index 56310435..11b6ffd2 100644 --- a/docs/overview.html +++ b/docs/overview.html @@ -1 +1 @@ - Overview | Documentation

Documentation 0.0.7 Help

Overview

Forest Recovery Digital Companion (FRDC) is a ML-assisted companion for ecologists to automatically classify surveyed trees via an Unmanned Aerial Vehicle (UAV).

This package, FRDC-ML is the Machine Learning backbone of this project, a centralized repository of tools and model architectures to be used in the FRDC pipeline.

Get started here

Other Projects

FRDC-UI

The User Interface Repository for FRDC, a WebApp GUI for ecologists to adjust annotations.

Last modified: 20 December 2023
\ No newline at end of file + Overview | Documentation

Documentation 0.0.7 Help

Overview

Forest Recovery Digital Companion (FRDC) is an ML-assisted companion for ecologists to automatically classify surveyed trees via an Unmanned Aerial Vehicle (UAV).

This package, FRDC-ML is the Machine Learning backbone of this project, a centralized repository of tools and model architectures to be used in the FRDC pipeline.

Get started here

Other Projects

FRDC-UI

The User Interface Repository for FRDC, a WebApp GUI for ecologists to adjust annotations.

Last modified: 26 December 2023
\ No newline at end of file diff --git a/docs/preprocessing-extract-segments.html b/docs/preprocessing-extract-segments.html index 6801c5e6..a2c6eff0 100644 --- a/docs/preprocessing-extract-segments.html +++ b/docs/preprocessing-extract-segments.html @@ -1,4 +1,4 @@ - preprocessing.extract_segments | Documentation

Documentation 0.0.7 Help

preprocessing.extract_segments

Functions

extract_segments_from_labels

Extracts segments from a label classification.

extract_segments_from_bounds

Extracts segments from Rect bounds.

remove_small_segments_from_labels

Removes small segments from a label classification.

Extract with Boundaries

A boundary is a Rect object that represents the minimum bounding box of a segment, with x0, y0, x1, y1 coordinates.

It simply slices the original image to the bounding box. The origin is the top left corner of the image.

+ preprocessing.extract_segments | Documentation

Documentation 0.0.7 Help

preprocessing.extract_segments

Functions

extract_segments_from_labels

Extracts segments from a label classification.

extract_segments_from_bounds

Extracts segments from Rect bounds.

remove_small_segments_from_labels

Removes small segments from a label classification.

Extract with Boundaries

A boundary is a Rect object that represents the minimum bounding box of a segment, with x0, y0, x1, y1 coordinates.

It simply slices the original image to the bounding box. The origin is the top left corner of the image.

(Diagram: a 3×3 example image sliced by a Rect bound (x0, y0, x1, y1); with cropping, only the cells inside the bound are kept in the segmented image.)
+
(Diagram: the same slice without cropping; the segmented image keeps the original size, with cells outside the bound zeroed.)

Extract with Labels

A label classification is a np.ndarray where each pixel is mapped to a segment. The segments are mapped to a unique integer. In our project, the 0th label is the background.

For example, a label classification of 3 segments will look like this:

+

Extract with Labels

A label classification is a np.ndarray where each pixel is mapped to a segment. The segments are mapped to a unique integer. In our project, the 0th label is the background.

For example, a label classification of 3 segments will look like this:

(Diagram: a 3×3 label classification shown next to the original image; each pixel holds its segment's integer label, with 0 marking the background.)

The extraction will take the minimum bounding box of each segment and return a list of segments.

For example, the label 1 and 2 extracted images will be

+

The extraction will take the minimum bounding box of each segment and return a list of segments.

For example, the label 1 and 2 extracted images will be

(Diagram: segments 1 and 2 extracted with cropping, each reduced to its minimum bounding box.)
+
(Diagram: segments 1 and 2 extracted without cropping, each padded with 0s to the original image size.)
  • If cropped is False, the segments are padded with 0s to the original image size. While this can ensure shape consistency, it can consume more memory for large images.

  • If cropped is True, the segments are cropped to the minimum bounding box. This can save memory, but the shape of the segments will be inconsistent.

Usage

Extract from Bounds and Labels

Extract segments from bounds and labels.

+
  • If cropped is False, the segments are padded with 0s to the original image size. While this can ensure shape consistency, it can consume more memory for large images.

  • If cropped is True, the segments are cropped to the minimum bounding box. This can save memory, but the shape of the segments will be inconsistent.
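A small sketch of the cropped flag described in the two points above; the parameter name follows the API section below, and ar and bounds are assumed to come from the Usage example:

from frdc.preprocess.extract_segments import extract_segments_from_bounds

# Padded to the original image size: consistent shapes, more memory.
segments_padded = extract_segments_from_bounds(ar, bounds, cropped=False)

# Cropped to each minimum bounding box: less memory, varying shapes.
segments_cropped = extract_segments_from_bounds(ar, bounds, cropped=True)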

Usage

Extract from Bounds and Labels

Extract segments from bounds and labels.

import numpy as np

from frdc.load import FRDCDataset
from frdc.preprocess.extract_segments import extract_segments_from_bounds

# (dataset loading is not shown in this hunk; as in the dataset tutorial)
ds = FRDCDataset(site="chestnut_nature_park", date="20201218", version=None)
ar, order = ds.get_ar_bands()
bounds, labels = ds.get_bounds_and_labels()

segments: list[np.ndarray] = extract_segments_from_bounds(ar, bounds)

Extract from Auto-Segmentation

Extract segments from a label classification.

+

Extract from Auto-Segmentation

Extract segments from a label classification.

from skimage.morphology import remove_small_objects, remove_small_holes
import numpy as np

# ... (the elided middle of this example loads the dataset, builds a binary
#      mask, cleans it with remove_small_objects / remove_small_holes, and
#      watersheds it into ar_labels; see preprocessing.morphology)
ar_labels = remove_small_segments_from_labels(ar_labels,
                                              min_height=10, min_width=10)

segments: list[np.ndarray] = extract_segments_from_labels(ar, ar_labels)

API

extract_segments_from_labels(ar, ar_labels, cropped)

Extracts segments from a label classification.


ar_labels is a label classification as a np.ndarray

extract_segments_from_bounds(ar, bounds, cropped)

Extracts segments from Rect bounds.


bounds is a list of Rect bounds.

remove_small_segments_from_labels(ar_labels, min_height, min_width)

Removes small segments from a label classification.


Last modified: 20 December 2023
\ No newline at end of file +

API

extract_segments_from_labels(ar, ar_labels, cropped)

Extracts segments from a label classification.


ar_labels is a label classification as a np.ndarray

extract_segments_from_bounds(ar, bounds, cropped)

Extracts segments from Rect bounds.


bounds is a list of Rect bounds.

remove_small_segments_from_labels(ar_labels, min_height, min_width)

Removes small segments from a label classification.


Last modified: 26 December 2023
\ No newline at end of file diff --git a/docs/preprocessing-glcm-padded.html b/docs/preprocessing-glcm-padded.html index ff0639fd..ed865bac 100644 --- a/docs/preprocessing-glcm-padded.html +++ b/docs/preprocessing-glcm-padded.html @@ -1,4 +1,4 @@ - preprocessing.glcm_padded | Documentation

Documentation 0.0.7 Help

preprocessing.glcm_padded

Functions

glcm_padded

Computes the GLCM of the NDArray bands with padding.

glcm_padded_cached

Computes the GLCM of the NDArray bands with padding, and caches it.

append_glcm_padded_cached

Computes the GLCM of the NDArray bands with padding, and caches it and also appends it onto the original array.

Usage

We show a few examples of how to use the GLCM functions.

+ preprocessing.glcm_padded | Documentation

Documentation 0.0.7 Help

preprocessing.glcm_padded

Functions

glcm_padded

Computes the GLCM of the NDArray bands with padding.

glcm_padded_cached

Computes the GLCM of the NDArray bands with padding, and caches it.

append_glcm_padded_cached

Computes the GLCM of the NDArray bands with padding, and caches it and also appends it onto the original array.

Usage

We show a few examples of how to use the GLCM functions.

import numpy as np

from glcm_cupy import Features

# ... (the elided middle of this example builds the band array `ar` and
#      computes ar_glcm, ar_glcm_2_features and ar_glcm_cached with the
#      same bin_from / bin_to / radius arguments as below)
ar_glcm_cached_appended = append_glcm_padded_cached(ar, bin_from=1, bin_to=4,
                                                    radius=3)
  • ar_glcm is the GLCM of the original array, with the last dimension being the GLCM features. The number of features is determined by the features parameter, which defaults to all features.

  • ar_glcm_2_features selects only 2 features, with the last dimension being the 2 GLCM features specified.

  • ar_glcm_cached caches the GLCM so that if you call it again, it will return the cached version. It stores its data at the project root dir, under .cache/.

  • ar_glcm_cached_appended is a wrapper around ar_glcm_cached, it appends the GLCM features onto the original array. It's equivalent to calling ar_glcm_cached and then np.concatenate on the final axes.

Caching

GLCM is an expensive operation, thus we recommend to cache it if the input parameters will be the same. This is especially useful if you're experimenting with the same dataset with constant parameters.

API

glcm_padded(ar, bin_from, bin_to, radius, step_size, features)

Computes the GLCM of the NDArray bands with padding.


  • ar is the input array

  • bin_from is the upper bound of the input

  • bin_to is the upper bound of the GLCM input, i.e. the resolution that GLCM operates on

  • radius is the radius of the GLCM

  • step_size is the step size of the GLCM

  • features is the list of GLCM features to compute

The return shape is

See glcm_cupy for the GLCM Features.

glcm_padded_cached(ar, bin_from, bin_to, radius, step_size, features)

Computes the GLCM of the NDArray bands with padding, and caches it.


See glcm_padded for the parameters and output shape

append_glcm_padded_cached(ar, bin_from, bin_to, radius, step_size, features)

Computes the GLCM of the NDArray bands with padding, and caches it and also appends it onto the original array.


See glcm_padded for the parameters


The return shape is:

The function automatically flattens the last 2 dimensions of the GLCM features, and appends it onto the original array.

Last modified: 20 December 2023
\ No newline at end of file +
  • ar_glcm is the GLCM of the original array, with the last dimension being the GLCM features. The number of features is determined by the features parameter, which defaults to all features.

  • ar_glcm_2_features selects only 2 features, with the last dimension being the 2 GLCM features specified.

  • ar_glcm_cached caches the GLCM so that if you call it again, it will return the cached version. It stores its data at the project root dir, under .cache/.

  • ar_glcm_cached_appended is a wrapper around ar_glcm_cached, it appends the GLCM features onto the original array. It's equivalent to calling ar_glcm_cached and then np.concatenate on the final axes.

Caching

GLCM is an expensive operation, thus we recommend caching it if the input parameters will be the same. This is especially useful if you're experimenting with the same dataset with constant parameters.
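As a sketch, repeating a cached call with identical arguments should hit the cache instead of recomputing; the import path assumes the module lives under frdc.preprocess.glcm_padded like its siblings, and ar is a band array from the Usage example:

from frdc.preprocess.glcm_padded import glcm_padded_cached

# First call computes the GLCM and writes it under the project's .cache/ dir.
ar_glcm = glcm_padded_cached(ar, bin_from=1, bin_to=4, radius=3)

# Same arguments again: loaded from the cache instead of recomputed.
ar_glcm_again = glcm_padded_cached(ar, bin_from=1, bin_to=4, radius=3)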

API

glcm_padded(ar, bin_from, bin_to, radius, step_size, features)

Computes the GLCM of the NDArray bands with padding.


  • ar is the input array

  • bin_from is the upper bound of the input

  • bin_to is the upper bound of the GLCM input, i.e. the resolution that GLCM operates on

  • radius is the radius of the GLCM

  • step_size is the step size of the GLCM

  • features is the list of GLCM features to compute

The return shape is

See glcm_cupy for the GLCM Features.

glcm_padded_cached(ar, bin_from, bin_to, radius, step_size, features)

Computes the GLCM of the NDArray bands with padding, and caches it.


See glcm_padded for the parameters and output shape

append_glcm_padded_cached(ar, bin_from, bin_to, radius, step_size, features)

Computes the GLCM of the NDArray bands with padding, and caches it and also appends it onto the original array.


See glcm_padded for the parameters


The return shape is:

The function automatically flattens the last 2 dimensions of the GLCM features, and appends it onto the original array.

Last modified: 26 December 2023
\ No newline at end of file diff --git a/docs/preprocessing-morphology.html b/docs/preprocessing-morphology.html index edc343b4..6697539f 100644 --- a/docs/preprocessing-morphology.html +++ b/docs/preprocessing-morphology.html @@ -1,4 +1,4 @@ - preprocessing.morphology | Documentation

Documentation 0.0.7 Help

preprocessing.morphology

Functions

threshold_binary_mask

Thresholds a selected NDArray bands to yield a binary mask.

binary_watershed

Performs watershed on a binary mask to yield a mapped label classification

Usage

Perform auto-segmentation on a dataset to yield a label classification.

+ preprocessing.morphology | Documentation

Documentation 0.0.7 Help

preprocessing.morphology

Functions

threshold_binary_mask

Thresholds a selected NDArray band to yield a binary mask.

binary_watershed

Performs watershed on a binary mask to yield a mapped label classification

Usage

Perform auto-segmentation on a dataset to yield a label classification.

from frdc.load import FRDCDataset
from frdc.preprocess.morphology import (
    threshold_binary_mask, binary_watershed
)

# (dataset loading is not shown in this hunk; as in the dataset tutorial)
ds = FRDCDataset(site="chestnut_nature_park", date="20201218", version=None)
ar, order = ds.get_ar_bands()

mask = threshold_binary_mask(ar, order.index('NIR'), 90 / 256)
ar_label = binary_watershed(mask)

API

threshold_binary_mask(ar, band_idx, threshold_value)

Thresholds a selected NDArray bands to yield a binary mask as np.ndarray


This is equivalent to

+

API

threshold_binary_mask(ar, band_idx, threshold_value)

Thresholds a selected NDArray band to yield a binary mask as an np.ndarray


This is equivalent to

ar[..., band_idx] > threshold_value
binary_watershed(ar_mask, peaks_footprint, watershed_compactness)

Performs watershed on a binary mask to yield a mapped label classification as a np.ndarray


  • peaks_footprint is the footprint of skimage.feature.peak_local_max

  • watershed_compactness is the compactness of skimage.morphology.watershed

Last modified: 20 December 2023
\ No newline at end of file +
binary_watershed(ar_mask, peaks_footprint, watershed_compactness)

Performs watershed on a binary mask to yield a mapped label classification as a np.ndarray


  • peaks_footprint is the footprint of skimage.feature.peak_local_max

  • watershed_compactness is the compactness of skimage.morphology.watershed

Last modified: 26 December 2023
\ No newline at end of file diff --git a/docs/preprocessing-scale.html b/docs/preprocessing-scale.html index c8213f8e..b846445e 100644 --- a/docs/preprocessing-scale.html +++ b/docs/preprocessing-scale.html @@ -1,4 +1,4 @@ - preprocessing.scale | Documentation

Documentation 0.0.7 Help

preprocessing.scale

Functions

scale_0_1_per_band

Scales the NDArray bands to [0, 1] per band.

scale_normal_per_band

Scales the NDArray bands to zero mean unit variance per band.

scale_static_per_band

Scales the NDArray bands by a predefined configuration. Take a look at frdc.conf.BAND_MAX_CONFIG for an example.

Usage

+ preprocessing.scale | Documentation

Documentation 0.0.7 Help

preprocessing.scale

Functions

scale_0_1_per_band

Scales the NDArray bands to [0, 1] per band.

scale_normal_per_band

Scales the NDArray bands to zero mean unit variance per band.

scale_static_per_band

Scales the NDArray bands by a predefined configuration. Take a look at frdc.conf.BAND_MAX_CONFIG for an example.
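As a rough sketch of what the first two functions compute per band (a simplification of the real implementations, with placeholder function names):

import numpy as np

def scale_0_1_per_band_sketch(ar: np.ndarray) -> np.ndarray:
    # Min-max scale each band (last axis) independently to [0, 1].
    mn = np.nanmin(ar, axis=(0, 1), keepdims=True)
    mx = np.nanmax(ar, axis=(0, 1), keepdims=True)
    return (ar - mn) / (mx - mn)

def scale_normal_per_band_sketch(ar: np.ndarray) -> np.ndarray:
    # Standardize each band to zero mean, unit variance.
    mean = np.nanmean(ar, axis=(0, 1), keepdims=True)
    std = np.nanstd(ar, axis=(0, 1), keepdims=True)
    return (ar - mean) / std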

Usage

from frdc.conf import BAND_MAX_CONFIG
from frdc.load import FRDCDataset
from frdc.preprocess.scale import (
    scale_0_1_per_band, scale_normal_per_band, scale_static_per_band
)

# (dataset loading is not shown in this hunk; as in the dataset tutorial)
ds = FRDCDataset(site="chestnut_nature_park", date="20201218", version=None)
ar, order = ds.get_ar_bands()

ar_01 = scale_0_1_per_band(ar)
ar_norm = scale_normal_per_band(ar)
ar_static = scale_static_per_band(ar, order, BAND_MAX_CONFIG)
Last modified: 20 December 2023
\ No newline at end of file +
Last modified: 26 December 2023
\ No newline at end of file diff --git a/docs/retrieve-our-datasets.html b/docs/retrieve-our-datasets.html index 644e51f5..bbeecb47 100644 --- a/docs/retrieve-our-datasets.html +++ b/docs/retrieve-our-datasets.html @@ -1,10 +1,10 @@ - Retrieve our Datasets | Documentation

Documentation 0.0.7 Help

Retrieve our Datasets

In this tutorial, we'll learn how to :

  • Retrieve FRDC's Hyperspectral Image Data as np.ndarray

  • Retrieve FRDC's Ground Truth bounds and labels

  • Slice/segment the image data by the bounds

Prerequisites

  • New here? Get Started.

  • Setup the Google Cloud Authorization to download the data.

Retrieve the Data

To retrieve the data, use FRDCDataset

Here, we'll download and load our

  • ar: Hyperspectral Image Data

  • order: The order of the bands

  • bounds: The bounds of the trees (segments)

  • labels: The labels of the trees (segments)

+ Retrieve our Datasets | Documentation

Documentation 0.0.7 Help

Retrieve our Datasets

In this tutorial, we'll learn how to:

  • Retrieve FRDC's Hyperspectral Image Data as np.ndarray

  • Retrieve FRDC's Ground Truth bounds and labels

  • Slice/segment the image data by the bounds

Prerequisites

  • New here? Get Started.

  • Set up the Google Cloud Authorization to download the data.

Retrieve the Data

To retrieve the data, use FRDCDataset

Here, we'll download and load our

  • ar: Hyperspectral Image Data

  • order: The order of the bands

  • bounds: The bounds of the trees (segments)

  • labels: The labels of the trees (segments)

from frdc.load.dataset import FRDCDataset

ds = FRDCDataset(site="chestnut_nature_park", date="20201218", version=None)
ar, order = ds.get_ar_bands()
bounds, labels = ds.get_bounds_and_labels()

What Datasets are there?

+

What Datasets are there?

from frdc.load.gcs import list_gcs_datasets

print(list_gcs_datasets())
# 0                          DEBUG/0
# ...
# 2       casuarina/20220418/93deg
# 3  chestnut_nature_park/20201218
# ...
  • The first part of the path is the site, and the second part is the date.

  • The version is the rest of the path, if there isn't any, use None.

  • site="ds"

  • date="date"

  • version="ver"

  • site="ds"

  • date="date"

  • version="ver/01/data"

  • site="ds"

  • date="date"

  • version=None

Segment the Data

To segment the data, use Extract Segments.

Here, we'll segment the data by the bounds.

+
  • The first part of the path is the site, and the second part is the date.

  • The version is the rest of the path, if there isn't any, use None.

  • site="ds"

  • date="date"

  • version="ver"

  • site="ds"

  • date="date"

  • version="ver/01/data"

  • site="ds"

  • date="date"

  • version=None
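Putting the path rules above into code, a sketch of the three forms; the site, date and version strings here are the placeholder values from the list, not real datasets:

from frdc.load.dataset import FRDCDataset

# "ds/date/ver"         -> site="ds", date="date", version="ver"
ds_a = FRDCDataset(site="ds", date="date", version="ver")

# "ds/date/ver/01/data" -> site="ds", date="date", version="ver/01/data"
ds_b = FRDCDataset(site="ds", date="date", version="ver/01/data")

# "ds/date"             -> site="ds", date="date", version=None
ds_c = FRDCDataset(site="ds", date="date", version=None)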

Segment the Data

To segment the data, use Extract Segments.

Here, we'll segment the data by the bounds.

from frdc.load.dataset import FRDCDataset
from frdc.preprocess.extract_segments import extract_segments_from_bounds

# (dataset loading is not shown in this hunk; as in the section above)
ds = FRDCDataset(site="chestnut_nature_park", date="20201218", version=None)
ar, order = ds.get_ar_bands()
bounds, labels = ds.get_bounds_and_labels()
segments = extract_segments_from_bounds(ar, bounds)

segments is a list of np.ndarray of shape H, W, C, representing a tree. The order of segments is the same as labels, so you can use labels to identify the tree.

Plot the Data (Optional)

We can then use these data to plot out the first tree segment.

+

segments is a list of np.ndarray of shape H, W, C, representing a tree. The order of segments is the same as labels, so you can use labels to identify the tree.
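For example, since segments and labels line up, pairing them is just a zip:

for label, segment in zip(labels, segments):
    # segment is an (H, W, C) np.ndarray for the tree named by `label`
    print(label, segment.shape)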

Plot the Data (Optional)

We can then use these data to plot out the first tree segment.

import matplotlib.pyplot as plt

from frdc.load.dataset import FRDCDataset

# ... (the elided middle of this example loads the dataset, extracts the
#      segments, converts the first segment from BGR to RGB and scales it
#      to [0, 1] per band as segment_0_rgb_scaled)
plt.imshow(segment_0_rgb_scaled)
plt.title(f"Tree {labels[0]}")
plt.show()

See also: preprocessing.scale.scale_0_1_per_band

MatPlotLib cannot show the data correctly as-is, so we need to

  • Convert the data from BGR to RGB

  • Scale the data to 0-1 per band

Last modified: 20 December 2023
\ No newline at end of file +

See also: preprocessing.scale.scale_0_1_per_band

MatPlotLib cannot show the data correctly as-is, so we need to

  • Convert the data from BGR to RGB

  • Scale the data to 0-1 per band

Last modified: 26 December 2023
\ No newline at end of file diff --git a/docs/site.webmanifest b/docs/site.webmanifest new file mode 100644 index 00000000..fe6a9303 --- /dev/null +++ b/docs/site.webmanifest @@ -0,0 +1,11 @@ +{ + "name": "JetBrains", + "short_name": "JetBrains", + "icons": [ + { "src": "icon-192.png", "type": "image/png", "sizes": "192x192" }, + { "src": "icon-512.png", "type": "image/png", "sizes": "512x512" } + ], + "theme_color": "#000000", + "background_color": "#000000", + "display": "standalone" +} \ No newline at end of file diff --git a/docs/train-frdc-lightning.html b/docs/train-frdc-lightning.html index ebcd6054..1886de79 100644 --- a/docs/train-frdc-lightning.html +++ b/docs/train-frdc-lightning.html @@ -1,4 +1,4 @@ - train.frdc_datamodule & frdc_module | Documentation

Documentation 0.0.7 Help

train.frdc_datamodule & frdc_module

These are FRDC specific LightningDataModule and LightningModule, a core component in the PyTorch Lightning ecosystem to provide a simple interface to train and evaluate models.

Classes

FRDCDataModule

The FRDC PyTorch Lightning DataModule.

FRDCModule

The FRDC PyTorch Lightning Module.

Usage

API

FRDCDataModule(segments, labels, preprocess, augmentation, train_val_test_split, batch_size)

Initializes the FRDC PyTorch Lightning DataModule.


  • segments, labels are retrieved from

  • preprocess is a function that takes in a segment and returns a preprocessed segment. In particular, it should accept a list of NumPy NDArrays and return a single stacked PyToch Tensor.

  • augmentation is a function that takes in a segment and returns an augmented segment. In particular, it takes in a PyTorch Tensor and returns another.

  • train_val_test_split is a function that takes a TensorDataset and returns a list of 3 TensorDatasets, for train, val and test respectively.

  • batch_size is the batch size.

FRDCModule(model_cls, model_kwargs, optim_cls, optim_kwargs)

Initializes the FRDC PyTorch Lightning Module.


  • model_cls is the Class of the model.

  • model_kwargs is the kwargs to pass to the model.

  • optim_cls is the Class of the optimizer.

  • optim_kwargs is the kwargs to pass to the optimizer.

Internally, the module will initialize the model and optimizer as follows:

+ train.frdc_datamodule & frdc_module | Documentation

Documentation 0.0.7 Help

train.frdc_datamodule & frdc_module

These are FRDC specific LightningDataModule and LightningModule, a core component in the PyTorch Lightning ecosystem to provide a simple interface to train and evaluate models.

Classes

FRDCDataModule

The FRDC PyTorch Lightning DataModule.

FRDCModule

The FRDC PyTorch Lightning Module.

Usage
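A rough usage sketch, assuming segments and labels come from FRDCDataset as in the dataset tutorial; the preprocess, augmentation, split callables and the model class are placeholders, and the frdc_module import path is inferred from this page's title:

import lightning as pl
import torch

from frdc.train.frdc_datamodule import FRDCDataModule
from frdc.train.frdc_module import FRDCModule

dm = FRDCDataModule(
    segments=segments,
    labels=labels,
    preprocess=my_preprocess,        # list[np.ndarray] -> stacked torch.Tensor
    augmentation=my_augmentation,    # torch.Tensor -> torch.Tensor
    train_val_test_split=my_split,   # TensorDataset -> [train, val, test]
    batch_size=32,
)

m = FRDCModule(
    model_cls=MyModel,               # placeholder model class
    model_kwargs=dict(n_classes=10),
    optim_cls=torch.optim.Adam,
    optim_kwargs=dict(lr=1e-3),
)

trainer = pl.Trainer(max_epochs=10)
trainer.fit(m, datamodule=dm)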

API

FRDCDataModule(segments, labels, preprocess, augmentation, train_val_test_split, batch_size)

Initializes the FRDC PyTorch Lightning DataModule.


  • segments, labels are retrieved from

  • preprocess is a function that takes in a segment and returns a preprocessed segment. In particular, it should accept a list of NumPy NDArrays and return a single stacked PyTorch Tensor.

  • augmentation is a function that takes in a segment and returns an augmented segment. In particular, it takes in a PyTorch Tensor and returns another.

  • train_val_test_split is a function that takes a TensorDataset and returns a list of 3 TensorDatasets, for train, val and test respectively.

  • batch_size is the batch size.

FRDCModule(model_cls, model_kwargs, optim_cls, optim_kwargs)

Initializes the FRDC PyTorch Lightning Module.


  • model_cls is the Class of the model.

  • model_kwargs is the kwargs to pass to the model.

  • optim_cls is the Class of the optimizer.

  • optim_kwargs is the kwargs to pass to the optimizer.

Internally, the module will initialize the model and optimizer as follows:

model = model_cls(**model_kwargs)
optim = optim_cls(model.parameters(), **optim_kwargs)
Last modified: 20 December 2023
\ No newline at end of file +
Last modified: 26 December 2023
\ No newline at end of file From 3ad231b037edec6effe1208ede4bc258e6a26dbd Mon Sep 17 00:00:00 2001 From: Evening Date: Tue, 26 Dec 2023 15:47:27 +0800 Subject: [PATCH 30/35] Update README.md --- README.md | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 9ddd6c92..d497d13a 100644 --- a/README.md +++ b/README.md @@ -54,14 +54,6 @@ To illustrate this, take a look at how `tests/model_tests/chestnut_dec_may/train.py` is written. It pulls in relevant modules from each stage and constructs a pipeline. - -> Initially, we evaluated a few ML E2E solutions, despite them offering great -> functionality, their flexibility was -> limited. From a dev perspective, **Active Learning** was a gray area, and we -> foresee heavy shoehorning. -> Ultimately, we decided that the risk was too great, thus we resort to -> creating our own solution. - ## Contributing ### Pre-commit Hooks @@ -80,3 +72,5 @@ If you're using `pip` instead of `poetry`, run the following commands: pip install pre-commit pre-commit install ``` + +Alternatively, you can use Black configured with your own IDE. From 3eb0b40a792286b235486c6baeee3ac0f14e82f9 Mon Sep 17 00:00:00 2001 From: Evening Date: Tue, 26 Dec 2023 15:53:17 +0800 Subject: [PATCH 31/35] Update devcontainer.json --- .devcontainer/devcontainer.json | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index ca281b81..5ca24e93 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,8 +1,7 @@ { "name": "frdc", "build": { - "context": "../", - "dockerfile": "Dockerfile", + "dockerfile": "../Dockerfile", }, "containerEnv": { "LABEL_STUDIO_HOST": "host.docker.internal", From d021af7cbb825c88fcb8885bd118f6f51cfca35c Mon Sep 17 00:00:00 2001 From: Evening Date: Tue, 26 Dec 2023 16:29:08 +0800 Subject: [PATCH 32/35] Attempt to fix codespace problem --- Dockerfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 9951dc95..6b535c26 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,8 +3,8 @@ WORKDIR /devcontainer COPY ./pyproject.toml /devcontainer/pyproject.toml -RUN apt-get update -RUN apt-get install git -y +RUN apt update && apt upgrade +RUN apt install git -y RUN pip3 install --upgrade pip && \ pip3 install poetry && \ @@ -16,5 +16,5 @@ RUN conda init bash \ && poetry config virtualenvs.create false \ && poetry install --with dev --no-interaction --no-ansi -RUN apt-get install curl -y && curl -sSL https://sdk.cloud.google.com | bash +RUN apt install curl -y && curl -sSL https://sdk.cloud.google.com | bash ENV PATH $PATH:/root/google-cloud-sdk/bin From 2636cf1454a0d2a34b770829fa55f558d242e4ef Mon Sep 17 00:00:00 2001 From: Evening Date: Tue, 26 Dec 2023 16:38:02 +0800 Subject: [PATCH 33/35] Update Dockerfile --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 6b535c26..526daecb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,7 +3,7 @@ WORKDIR /devcontainer COPY ./pyproject.toml /devcontainer/pyproject.toml -RUN apt update && apt upgrade +RUN apt update -y && apt upgrade -y RUN apt install git -y RUN pip3 install --upgrade pip && \ From bac614adb1e19d71d512e4d52824038146755b7f Mon Sep 17 00:00:00 2001 From: Eve-ning Date: Wed, 27 Dec 2023 16:24:11 +0800 Subject: [PATCH 34/35] Force Dockerfile to LF --- .gitattributes | 0 Dockerfile | 0 2 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 
.gitattributes mode change 100644 => 100755 Dockerfile diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000..e69de29b diff --git a/Dockerfile b/Dockerfile old mode 100644 new mode 100755 From 399fc54ea766b9c1dcb9699cac337a5908ce895e Mon Sep 17 00:00:00 2001 From: Evening Date: Wed, 27 Dec 2023 16:24:51 +0800 Subject: [PATCH 35/35] Force Dockerfile to LF --- .gitattributes | 1 + Dockerfile | 0 2 files changed, 1 insertion(+) mode change 100755 => 100644 Dockerfile diff --git a/.gitattributes b/.gitattributes index e69de29b..d28cb2fc 100644 --- a/.gitattributes +++ b/.gitattributes @@ -0,0 +1 @@ +Dockerfile text=auto eol=lf \ No newline at end of file diff --git a/Dockerfile b/Dockerfile old mode 100755 new mode 100644