From 3ca0697081c6cb510eb6d43eb7769605b1bc49ad Mon Sep 17 00:00:00 2001 From: Artem Yushkovskiy Date: Thu, 5 Sep 2019 18:58:40 +0300 Subject: [PATCH 01/15] add python script --- {{cookiecutter.project_slug}}/neu.py | 179 +++++++++++++++++++++++++++ 1 file changed, 179 insertions(+) create mode 100644 {{cookiecutter.project_slug}}/neu.py diff --git a/{{cookiecutter.project_slug}}/neu.py b/{{cookiecutter.project_slug}}/neu.py new file mode 100644 index 00000000..56532ff0 --- /dev/null +++ b/{{cookiecutter.project_slug}}/neu.py @@ -0,0 +1,179 @@ +from utils import run # TODO: form a normal Python package + +CODE_PATH = "__test_project" +DATA_PATH = "data" +NOTEBOOKS_PATH = "notebooks" +REQUIREMENTS_PATH = "requirements" +RESULTS_PATH = "results" +PROJECT_PATH_STORAGE = "storage:__test_project" # TODO : <- change htis value +CODE_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{CODE_PATH}" +DATA_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{DATA_PATH}" +NOTEBOOKS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{NOTEBOOKS_PATH}" +REQUIREMENTS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{REQUIREMENTS_PATH}" +RESULTS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{RESULTS_PATH}" + +PROJECT_PATH_ENV = "/project" +CODE_PATH_ENV = f"{PROJECT_PATH_ENV}/{CODE_PATH}" +DATA_PATH_ENV = f"{PROJECT_PATH_ENV}/{DATA_PATH}" +NOTEBOOKS_PATH_ENV = f"{PROJECT_PATH_ENV}/{NOTEBOOKS_PATH}" +REQUIREMENTS_PATH_ENV = f"{PROJECT_PATH_ENV}/{REQUIREMENTS_PATH}" +RESULTS_PATH_ENV = f"{PROJECT_PATH_ENV}/{RESULTS_PATH}" + +SETUP_NAME = "setup" +TRAINING_NAME = "training" +JUPYTER_NAME = "jupyter" +TENSORBOARD_NAME = "tensorboard" +FILEBROWSER_NAME = "filebrowser" + +BASE_ENV_NAME = "image:neuro/base" +CUSTOM_ENV_NAME = "image:neuro/custom" + + + +##### SETUP ##### + +def setup(): + run(f"neuro kill {SETUP_NAME}") + cmd = ( + f"neuro run --name {SETUP_NAME} --preset cpu-small --detach " + f"--volume {PROJECT_PATH_STORAGE}:{PROJECT_PATH_ENV}:ro " + f"{BASE_ENV_NAME} 'tail -f /dev/null'" # TODO: must be 'sleep 1h' + ) + run(cmd) + 
run(f"neuro cp -r {REQUIREMENTS_PATH} {REQUIREMENTS_PATH_STORAGE}") + # For some reason the second command fail + # neuro exec {SETUP_NAME} 'apt-get update' + # neuro exec {SETUP_NAME} 'cat {REQUIREMENTS_PATH_ENV}/apt.txt | xargs apt-get install -y' + run(f"neuro exec {SETUP_NAME} 'pip install -r {REQUIREMENTS_PATH_ENV}/pip.txt'") + run(f"neuro job save {SETUP_NAME} {CUSTOM_ENV_NAME}") + run(f"neuro kill {SETUP_NAME}") + + +##### STORAGE ##### + +def upload_code(): + run(f"neuro cp -r -T {CODE_PATH} {CODE_PATH_STORAGE}") + +def clean_code(): + run(f"neuro rm -r {CODE_PATH_STORAGE}") + +def upload_data(): + run(f"neuro storage load -p -u -T {DATA_PATH} {DATA_PATH_STORAGE}") + +def clean_data(): + run(f"neuro rm -r {DATA_PATH_STORAGE}") + +def upload_notebooks(): + run(f"neuro cp -r -T {NOTEBOOKS_PATH} {NOTEBOOKS_PATH_STORAGE}") + +def download_notebooks(): + run(f"neuro cp -r {NOTEBOOKS_PATH_STORAGE} {NOTEBOOKS_PATH}") + +def clean_notebooks(): + run(f"neuro rm -r {NOTEBOOKS_PATH_STORAGE}") + +def upload(): + upload_code() + upload_data() + upload_notebooks() + +def clean(): + clean_code() + clean_data() + clean_notebooks() + +##### JOBS ##### + +def run_training(): + cmd = ( + f"python {CODE_PATH_ENV}/train.py --log_dir " + f"{RESULTS_PATH_ENV} --data_root {DATA_PATH_ENV}/cifar10" + ) + run(f"neuro run --name {TRAINING_NAME} --preset gpu-small " + f"--volume {DATA_PATH_STORAGE}:{DATA_PATH_ENV}:ro " + f"--volume {CODE_PATH_STORAGE}:{CODE_PATH_ENV}:ro " + f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:rw " + f"{CUSTOM_ENV_NAME} " + f"'{cmd}'" + ) + +def kill_training(): + run(f"neuro kill {TRAINING_NAME}") + +def connect_training(): + run(f"neuro exec {TRAINING_NAME} bash") + +def run_jupyter(): + cmd = ( + f"jupyter notebook --no-browser --ip=0.0.0.0 --allow-root " + f"--NotebookApp.token= --notebook-dir={NOTEBOOKS_PATH_ENV}" + ) + run(f"neuro run " + f"--name {JUPYTER_NAME} " + f"--preset gpu-small " + f"--http 8888 --no-http-auth --detach " + f"--volume 
{DATA_PATH_STORAGE}:{DATA_PATH_ENV}:ro " + f"--volume {CODE_PATH_STORAGE}:{CODE_PATH_ENV}:rw " + f"--volume {NOTEBOOKS_PATH_STORAGE}:{NOTEBOOKS_PATH_ENV}:rw " + f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:rw " + f"{CUSTOM_ENV_NAME} " + f"'{cmd}'" + ) + run(f"neuro job browse {JUPYTER_NAME}") + +def kill_jupyter(): + run(f"neuro kill {JUPYTER_NAME}") + +def run_tensorboard(): + cmd = f"tensorboard --logdir={RESULTS_PATH_ENV}" + run(f"neuro run " + f"--name {TENSORBOARD_NAME} " + f"--preset cpu-small " + f"--http 6006 --no-http-auth --detach " + f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:ro " + f"{CUSTOM_ENV_NAME} " + f"'{cmd}'" + ) + run(f"neuro job browse {TENSORBOARD_NAME}") + +def kill_tensorboard(): + run(f"neuro kill {TENSORBOARD_NAME}") + +def run_filebrowser(): + run(f"neuro run " + f"--name {FILEBROWSER_NAME} " + f"--preset cpu-small " + f"--http 80 --no-http-auth --detach " + f"--volume {PROJECT_PATH_STORAGE}:/srv:rw " + f"filebrowser/filebrowser" + ) + run(f"neuro job browse {FILEBROWSER_NAME}") + +def kill_filebrowser(): + run(f"neuro kill {FILEBROWSER_NAME}") + +def kill(): + kill_training() + kill_jupyter() + kill_tensorboard() + kill_filebrowser() + +##### LOCAL ##### + +def setup_local(): + run("pip install -r requirements/pip.txt") + +def lint(): + run("flake8 .") + run("mypy .") + +def install(): + run("python setup.py install --user") + +##### MISC ##### + +def ps(): + run(f"neuro ps") + +if __name__ == "__main__": + setup() \ No newline at end of file From 794e5d58ac20d88aa01c0af50f24121b5f624022 Mon Sep 17 00:00:00 2001 From: Artem Yushkovskiy Date: Thu, 5 Sep 2019 19:00:45 +0300 Subject: [PATCH 02/15] add missing --- {{cookiecutter.project_slug}}/utils.py | 127 +++++++++++++++++++++++++ 1 file changed, 127 insertions(+) create mode 100644 {{cookiecutter.project_slug}}/utils.py diff --git a/{{cookiecutter.project_slug}}/utils.py b/{{cookiecutter.project_slug}}/utils.py new file mode 100644 index 00000000..11cdd6e8 --- /dev/null 
+++ b/{{cookiecutter.project_slug}}/utils.py @@ -0,0 +1,127 @@ +import inspect +import logging +import re +import shlex +import signal +import subprocess +import typing as t +from collections import namedtuple +from contextlib import contextmanager +from pathlib import Path +from time import sleep +from uuid import uuid4 + + +OUT_DIRECTORY_NAME = "out" +SUBMITTED_JOBS_FILE_NAME = "submitted_jobs.txt" + +DEFAULT_TIMEOUT = 5 * 60 + +SysCap = namedtuple("SysCap", "out err") + + +job_id_pattern = re.compile( + # pattern for UUID v4 taken here: https://stackoverflow.com/a/38191078 + r"(job-[0-9a-f]{8}-[0-9a-f]{4}-[4][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12})", + re.IGNORECASE, +) + + +def get_submitted_jobs_file() -> Path: + project_root = Path(__file__).resolve().parent + out_path = project_root / OUT_DIRECTORY_NAME + return out_path / SUBMITTED_JOBS_FILE_NAME + + +SUBMITTED_JOBS_FILE = get_submitted_jobs_file() + +log = logging.getLogger(__name__) + + +def split_command(cmd: str) -> t.List[str]: + return shlex.split(cmd) + +def random_str(length: int) -> str: + assert 0 <= length <= 32, length + return uuid4().hex[:length] + + +def generate_job_name() -> str: + postfix = f"-{random_str(4)}" + return inspect.stack()[1].function.replace("_", "-") + postfix + + +@contextmanager +def timeout(time_s: int) -> t.Iterator[None]: + """ source: https://www.jujens.eu/posts/en/2018/Jun/02/python-timeout-function/ + """ + + def raise_timeout() -> t.NoReturn: + raise TimeoutError + + # Register a function to raise a TimeoutError on the signal. + signal.signal(signal.SIGALRM, raise_timeout) # type: ignore + # Schedule the signal to be sent after ``time``. + signal.alarm(time_s) + + try: + yield + except TimeoutError: + pass + finally: + # Unregister the signal so it won't be triggered + # if the timeout is not reached. 
+ signal.signal(signal.SIGALRM, signal.SIG_IGN) + + +def fire_and_forget(cmd: str) -> subprocess.Popen: + proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE) + return proc + + +def wait_for_output( + cmd: str, expect_stdin: str, timeout_s: int = DEFAULT_TIMEOUT +) -> None: + delay_s = 1 + with timeout(timeout_s): + while True: + try: + captured = run(cmd, timeout_s=timeout_s // 5) + if captured.err: + print(f"stderr: `{captured.err}`") + except subprocess.CalledProcessError as e: + log.error(f"Caught error: {e}, retrying") + continue + if expect_stdin in captured.out: + return + sleep(delay_s) + + +def run(cmd: str, timeout_s: int = DEFAULT_TIMEOUT) -> SysCap: + log.info(f"Runing command: '{cmd}'") + print(f"Runing command: '{cmd}'") # TODO : debug <-- + args = shlex.split(cmd) + proc = subprocess.run( + args, + timeout=timeout_s, + encoding="utf8", + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + try: + proc.check_returncode() + except subprocess.CalledProcessError: + log.error(f"Last stdout: '{proc.stdout}'") + log.error(f"Last stderr: '{proc.stderr}'") + raise + out = proc.stdout + err = proc.stderr + if any(start in " ".join(args) for start in ("submit", "run")): + match = job_id_pattern.search(out) + if match: + job_id = match.group(1) + with SUBMITTED_JOBS_FILE.open("a") as f: + f.write(job_id + "\n") + out = out.strip() + err = err.strip() + return SysCap(out, err) From c7bd62cffdc7df4643e679752bc8819e7a8eeb21 Mon Sep 17 00:00:00 2001 From: Artem Yushkovskiy Date: Fri, 6 Sep 2019 11:46:48 +0300 Subject: [PATCH 03/15] add project structure --- neuro-shortcuts/.gitignore | 11 +++++ neuro-shortcuts/Makefile | 22 +++++++++ .../neuro_shortcuts}/neu.py | 46 ++++++++++++++++--- .../neuro_shortcuts}/utils.py | 1 + neuro-shortcuts/requirements-test.txt | 4 ++ neuro-shortcuts/setup.cfg | 34 ++++++++++++++ neuro-shortcuts/setup.py | 11 +++++ neuro-shortcuts/tests/__init__.py | 0 8 files changed, 122 insertions(+), 7 deletions(-) create mode 
100644 neuro-shortcuts/.gitignore create mode 100644 neuro-shortcuts/Makefile rename {{{cookiecutter.project_slug}} => neuro-shortcuts/neuro_shortcuts}/neu.py (95%) rename {{{cookiecutter.project_slug}} => neuro-shortcuts/neuro_shortcuts}/utils.py (99%) create mode 100644 neuro-shortcuts/requirements-test.txt create mode 100644 neuro-shortcuts/setup.cfg create mode 100644 neuro-shortcuts/setup.py create mode 100644 neuro-shortcuts/tests/__init__.py diff --git a/neuro-shortcuts/.gitignore b/neuro-shortcuts/.gitignore new file mode 100644 index 00000000..59f4055b --- /dev/null +++ b/neuro-shortcuts/.gitignore @@ -0,0 +1,11 @@ +/.env/ +/venv/ +pip-wheel-metadata +.pytest_cache +*.swo +*.egg-info +**/__pycache__ +.eggs +.mypy_cache +.tmontmp +.testmondata diff --git a/neuro-shortcuts/Makefile b/neuro-shortcuts/Makefile new file mode 100644 index 00000000..cd5e1914 --- /dev/null +++ b/neuro-shortcuts/Makefile @@ -0,0 +1,22 @@ +ISORT_DIRS := neuro_shortcuts setup.py +BLACK_DIRS := $(ISORT_DIRS) +MYPY_DIRS := tests + +setup: + pip install --disable-pip-version-check -r requirements-test.txt + +.PHONY: lint +lint: + isort -c -rc ${ISORT_DIRS} + black --check $(BLACK_DIRS) + mypy $(MYPY_DIRS) + flake8 $(FLAKE8_DIRS) + +.PHONY: format +format: + isort -rc $(ISORT_DIRS) + black $(BLACK_DIRS) + +.PHONY: test +test: + pytest -v tests/ diff --git a/{{cookiecutter.project_slug}}/neu.py b/neuro-shortcuts/neuro_shortcuts/neu.py similarity index 95% rename from {{cookiecutter.project_slug}}/neu.py rename to neuro-shortcuts/neuro_shortcuts/neu.py index 56532ff0..05e09520 100644 --- a/{{cookiecutter.project_slug}}/neu.py +++ b/neuro-shortcuts/neuro_shortcuts/neu.py @@ -1,4 +1,5 @@ -from utils import run # TODO: form a normal Python package +from .utils import run + CODE_PATH = "__test_project" DATA_PATH = "data" @@ -29,9 +30,9 @@ CUSTOM_ENV_NAME = "image:neuro/custom" - ##### SETUP ##### + def setup(): run(f"neuro kill {SETUP_NAME}") cmd = ( @@ -51,45 +52,57 @@ def setup(): ##### 
STORAGE ##### + def upload_code(): run(f"neuro cp -r -T {CODE_PATH} {CODE_PATH_STORAGE}") + def clean_code(): run(f"neuro rm -r {CODE_PATH_STORAGE}") + def upload_data(): run(f"neuro storage load -p -u -T {DATA_PATH} {DATA_PATH_STORAGE}") + def clean_data(): run(f"neuro rm -r {DATA_PATH_STORAGE}") + def upload_notebooks(): run(f"neuro cp -r -T {NOTEBOOKS_PATH} {NOTEBOOKS_PATH_STORAGE}") + def download_notebooks(): run(f"neuro cp -r {NOTEBOOKS_PATH_STORAGE} {NOTEBOOKS_PATH}") + def clean_notebooks(): run(f"neuro rm -r {NOTEBOOKS_PATH_STORAGE}") + def upload(): upload_code() upload_data() upload_notebooks() + def clean(): clean_code() clean_data() clean_notebooks() + ##### JOBS ##### + def run_training(): cmd = ( f"python {CODE_PATH_ENV}/train.py --log_dir " f"{RESULTS_PATH_ENV} --data_root {DATA_PATH_ENV}/cifar10" ) - run(f"neuro run --name {TRAINING_NAME} --preset gpu-small " + run( + f"neuro run --name {TRAINING_NAME} --preset gpu-small " f"--volume {DATA_PATH_STORAGE}:{DATA_PATH_ENV}:ro " f"--volume {CODE_PATH_STORAGE}:{CODE_PATH_ENV}:ro " f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:rw " @@ -97,18 +110,22 @@ def run_training(): f"'{cmd}'" ) + def kill_training(): run(f"neuro kill {TRAINING_NAME}") + def connect_training(): run(f"neuro exec {TRAINING_NAME} bash") + def run_jupyter(): cmd = ( f"jupyter notebook --no-browser --ip=0.0.0.0 --allow-root " f"--NotebookApp.token= --notebook-dir={NOTEBOOKS_PATH_ENV}" ) - run(f"neuro run " + run( + f"neuro run " f"--name {JUPYTER_NAME} " f"--preset gpu-small " f"--http 8888 --no-http-auth --detach " @@ -121,12 +138,15 @@ def run_jupyter(): ) run(f"neuro job browse {JUPYTER_NAME}") + def kill_jupyter(): run(f"neuro kill {JUPYTER_NAME}") + def run_tensorboard(): cmd = f"tensorboard --logdir={RESULTS_PATH_ENV}" - run(f"neuro run " + run( + f"neuro run " f"--name {TENSORBOARD_NAME} " f"--preset cpu-small " f"--http 6006 --no-http-auth --detach " @@ -136,11 +156,14 @@ def run_tensorboard(): ) run(f"neuro job browse 
{TENSORBOARD_NAME}") + def kill_tensorboard(): run(f"neuro kill {TENSORBOARD_NAME}") + def run_filebrowser(): - run(f"neuro run " + run( + f"neuro run " f"--name {FILEBROWSER_NAME} " f"--preset cpu-small " f"--http 80 --no-http-auth --detach " @@ -149,31 +172,40 @@ def run_filebrowser(): ) run(f"neuro job browse {FILEBROWSER_NAME}") + def kill_filebrowser(): run(f"neuro kill {FILEBROWSER_NAME}") + def kill(): kill_training() kill_jupyter() kill_tensorboard() kill_filebrowser() + ##### LOCAL ##### + def setup_local(): run("pip install -r requirements/pip.txt") + def lint(): run("flake8 .") run("mypy .") + def install(): run("python setup.py install --user") + ##### MISC ##### + def ps(): run(f"neuro ps") + if __name__ == "__main__": - setup() \ No newline at end of file + setup() diff --git a/{{cookiecutter.project_slug}}/utils.py b/neuro-shortcuts/neuro_shortcuts/utils.py similarity index 99% rename from {{cookiecutter.project_slug}}/utils.py rename to neuro-shortcuts/neuro_shortcuts/utils.py index 11cdd6e8..47595aaa 100644 --- a/{{cookiecutter.project_slug}}/utils.py +++ b/neuro-shortcuts/neuro_shortcuts/utils.py @@ -41,6 +41,7 @@ def get_submitted_jobs_file() -> Path: def split_command(cmd: str) -> t.List[str]: return shlex.split(cmd) + def random_str(length: int) -> str: assert 0 <= length <= 32, length return uuid4().hex[:length] diff --git a/neuro-shortcuts/requirements-test.txt b/neuro-shortcuts/requirements-test.txt new file mode 100644 index 00000000..a8e0eb2c --- /dev/null +++ b/neuro-shortcuts/requirements-test.txt @@ -0,0 +1,4 @@ +#neuromation==19.9.2 +flake8==3.7.8 +isort==4.3.21 +black==19.3b0 diff --git a/neuro-shortcuts/setup.cfg b/neuro-shortcuts/setup.cfg new file mode 100644 index 00000000..1bef14f7 --- /dev/null +++ b/neuro-shortcuts/setup.cfg @@ -0,0 +1,34 @@ +[flake8] +exclude = .git,.env,venv,__pycache__,.eggs +max-line-length = 88 +ignore = N801,N802,N803,E252,W503,E133,E203 + +[isort] +line_length=88 +include_trailing_comma=True 
+multi_line_output=3 +force_grid_wrap=0 +combine_as_imports=True +lines_after_imports=2 +known_standard_library=dataclasses +known_third_party=aiohttp,async_timeout,pytest + +[mypy] +check_untyped_defs = True +disallow_any_generics = True +disallow_untyped_defs = True +follow_imports = silent +strict_optional = True +warn_redundant_casts = True +warn_unused_ignores = True +warn_unused_configs = True +incremental = False + +[mypy-pytest] +ignore_missing_imports = true + +[mypy-timeout_decorator] +ignore_missing_imports = true + +[mypy-pexpect] +ignore_missing_imports = true diff --git a/neuro-shortcuts/setup.py b/neuro-shortcuts/setup.py new file mode 100644 index 00000000..22cc34ba --- /dev/null +++ b/neuro-shortcuts/setup.py @@ -0,0 +1,11 @@ +from setuptools import find_packages, setup + + +setup( + name="neuro-shortcuts", + version="0.0.1b1", + packages=find_packages(), + python_requires=">=3.7.0", + install_requires=(), + entry_points={"pytest11": []}, +) diff --git a/neuro-shortcuts/tests/__init__.py b/neuro-shortcuts/tests/__init__.py new file mode 100644 index 00000000..e69de29b From 139f92bc185f9e8ca0a4dff5aaadb0284b28f42c Mon Sep 17 00:00:00 2001 From: Artem Yushkovskiy Date: Fri, 6 Sep 2019 12:09:58 +0300 Subject: [PATCH 04/15] introduce projects --- neuro-shortcuts/neuro_shortcuts/neu.py | 108 ++++++++++++++++++------- 1 file changed, 79 insertions(+), 29 deletions(-) diff --git a/neuro-shortcuts/neuro_shortcuts/neu.py b/neuro-shortcuts/neuro_shortcuts/neu.py index 05e09520..3ee23f7e 100644 --- a/neuro-shortcuts/neuro_shortcuts/neu.py +++ b/neuro-shortcuts/neuro_shortcuts/neu.py @@ -1,43 +1,93 @@ -from .utils import run - +import abc +from dataclasses import dataclass -CODE_PATH = "__test_project" -DATA_PATH = "data" -NOTEBOOKS_PATH = "notebooks" -REQUIREMENTS_PATH = "requirements" -RESULTS_PATH = "results" -PROJECT_PATH_STORAGE = "storage:__test_project" # TODO : <- change htis value -CODE_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{CODE_PATH}" 
-DATA_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{DATA_PATH}" -NOTEBOOKS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{NOTEBOOKS_PATH}" -REQUIREMENTS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{REQUIREMENTS_PATH}" -RESULTS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{RESULTS_PATH}" - -PROJECT_PATH_ENV = "/project" -CODE_PATH_ENV = f"{PROJECT_PATH_ENV}/{CODE_PATH}" -DATA_PATH_ENV = f"{PROJECT_PATH_ENV}/{DATA_PATH}" -NOTEBOOKS_PATH_ENV = f"{PROJECT_PATH_ENV}/{NOTEBOOKS_PATH}" -REQUIREMENTS_PATH_ENV = f"{PROJECT_PATH_ENV}/{REQUIREMENTS_PATH}" -RESULTS_PATH_ENV = f"{PROJECT_PATH_ENV}/{RESULTS_PATH}" +from .utils import run -SETUP_NAME = "setup" -TRAINING_NAME = "training" -JUPYTER_NAME = "jupyter" -TENSORBOARD_NAME = "tensorboard" -FILEBROWSER_NAME = "filebrowser" -BASE_ENV_NAME = "image:neuro/base" -CUSTOM_ENV_NAME = "image:neuro/custom" +class Project(abc.ABC): + @abc.abstractmethod + def root(self) -> str: + pass + + @property + def data(self) -> str: + return f"{self.root}/data" + @property + def code(self) -> str: + return f"{self.root}/code" + @property + def notebooks(self) -> str: + return f"{self.root}/notebooks" + @property + def requirements(self) -> str: + return f"{self.root}/requirements" + @property + def results(self) -> str: + return f"{self.root}/results" + + +class StorageProject(Project): + def __init__(self, project_name: str) -> None: + self._project_name = project_name + + def root(self) -> str: + return f"storage:{self._project_name}" + +class LocalProject(Project): + def __init__(self): + # TODO (artem) remember `pwd` as `self._project_path` + pass + def root(self) -> str: + # TODO: return self._project_path + raise NotImplemented() + +class ContainerProject(Project): + def root(self) -> str: + # TODO: always in the root? 
+ return "/project" + +@dataclass +class Config: + local_project: LocalProject + storage_project: LocalProject + container_project: LocalProject + + SETUP_NAME = "setup" + TRAINING_NAME = "training" + JUPYTER_NAME = "jupyter" + TENSORBOARD_NAME = "tensorboard" + FILEBROWSER_NAME = "filebrowser" + BASE_ENV_NAME = "image:neuro/base" + CUSTOM_ENV_NAME = "image:neuro/custom" + +# CODE_PATH = "__test_project" +# DATA_PATH = "data" +# NOTEBOOKS_PATH = "notebooks" +# REQUIREMENTS_PATH = "requirements" +# RESULTS_PATH = "results" +# PROJECT_PATH_STORAGE = "storage:__test_project" +# CODE_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{CODE_PATH}" +# DATA_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{DATA_PATH}" +# NOTEBOOKS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{NOTEBOOKS_PATH}" +# REQUIREMENTS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{REQUIREMENTS_PATH}" +# RESULTS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{RESULTS_PATH}" +# +# PROJECT_PATH_ENV = "/project" +# CODE_PATH_ENV = f"{PROJECT_PATH_ENV}/{CODE_PATH}" +# DATA_PATH_ENV = f"{PROJECT_PATH_ENV}/{DATA_PATH}" +# NOTEBOOKS_PATH_ENV = f"{PROJECT_PATH_ENV}/{NOTEBOOKS_PATH}" +# REQUIREMENTS_PATH_ENV = f"{PROJECT_PATH_ENV}/{REQUIREMENTS_PATH}" +# RESULTS_PATH_ENV = f"{PROJECT_PATH_ENV}/{RESULTS_PATH}" ##### SETUP ##### -def setup(): +def setup(cfg: Config): run(f"neuro kill {SETUP_NAME}") cmd = ( f"neuro run --name {SETUP_NAME} --preset cpu-small --detach " - f"--volume {PROJECT_PATH_STORAGE}:{PROJECT_PATH_ENV}:ro " + f"--volume {storage_project.root}:{container_project.root}:ro " f"{BASE_ENV_NAME} 'tail -f /dev/null'" # TODO: must be 'sleep 1h' ) run(cmd) From 4102d48683b87ff3e1af2ab788d4b92decb14dfc Mon Sep 17 00:00:00 2001 From: Artem Yushkovskiy Date: Fri, 6 Sep 2019 12:18:10 +0300 Subject: [PATCH 05/15] cleanup --- .circleci/config.yml | 4 +- neuro-shortcuts/Makefile | 2 +- neuro-shortcuts/neuro_shortcuts/neu.py | 317 +++++++++--------- ...irements-test.txt => requirements-dev.txt} | 1 + 4 files changed, 170 insertions(+), 154 
deletions(-) rename neuro-shortcuts/{requirements-test.txt => requirements-dev.txt} (84%) diff --git a/.circleci/config.yml b/.circleci/config.yml index 6d4723e6..133ff967 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -23,7 +23,7 @@ jobs: # Download and cache dependencies - restore_cache: keys: - - v1-dependencies-{{ checksum "requirements-dev.txt" }} + - v1-dependencies-{{ checksum "requirements-requirements-dev.txt" }} # fallback to using the latest cache if no exact match is found - v1-dependencies- @@ -37,7 +37,7 @@ jobs: - save_cache: paths: - ./venv - key: v1-dependencies-{{ checksum "requirements-dev.txt" }} + key: v1-dependencies-{{ checksum "requirements-requirements-dev.txt" }} - run: name: run tests diff --git a/neuro-shortcuts/Makefile b/neuro-shortcuts/Makefile index cd5e1914..a54332c9 100644 --- a/neuro-shortcuts/Makefile +++ b/neuro-shortcuts/Makefile @@ -3,7 +3,7 @@ BLACK_DIRS := $(ISORT_DIRS) MYPY_DIRS := tests setup: - pip install --disable-pip-version-check -r requirements-test.txt + pip install --disable-pip-version-check -r requirements-dev.txt .PHONY: lint lint: diff --git a/neuro-shortcuts/neuro_shortcuts/neu.py b/neuro-shortcuts/neuro_shortcuts/neu.py index 3ee23f7e..c37b53c7 100644 --- a/neuro-shortcuts/neuro_shortcuts/neu.py +++ b/neuro-shortcuts/neuro_shortcuts/neu.py @@ -12,15 +12,19 @@ def root(self) -> str: @property def data(self) -> str: return f"{self.root}/data" + @property def code(self) -> str: return f"{self.root}/code" + @property def notebooks(self) -> str: return f"{self.root}/notebooks" + @property def requirements(self) -> str: return f"{self.root}/requirements" + @property def results(self) -> str: return f"{self.root}/results" @@ -33,25 +37,29 @@ def __init__(self, project_name: str) -> None: def root(self) -> str: return f"storage:{self._project_name}" + class LocalProject(Project): def __init__(self): # TODO (artem) remember `pwd` as `self._project_path` pass + def root(self) -> str: # TODO: return 
self._project_path raise NotImplemented() - + + class ContainerProject(Project): def root(self) -> str: # TODO: always in the root? return "/project" + @dataclass class Config: - local_project: LocalProject - storage_project: LocalProject - container_project: LocalProject - + local: LocalProject + storage: LocalProject + container: LocalProject + SETUP_NAME = "setup" TRAINING_NAME = "training" JUPYTER_NAME = "jupyter" @@ -60,201 +68,208 @@ class Config: BASE_ENV_NAME = "image:neuro/base" CUSTOM_ENV_NAME = "image:neuro/custom" -# CODE_PATH = "__test_project" -# DATA_PATH = "data" -# NOTEBOOKS_PATH = "notebooks" -# REQUIREMENTS_PATH = "requirements" -# RESULTS_PATH = "results" -# PROJECT_PATH_STORAGE = "storage:__test_project" -# CODE_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{CODE_PATH}" -# DATA_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{DATA_PATH}" -# NOTEBOOKS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{NOTEBOOKS_PATH}" -# REQUIREMENTS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{REQUIREMENTS_PATH}" -# RESULTS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{RESULTS_PATH}" -# -# PROJECT_PATH_ENV = "/project" -# CODE_PATH_ENV = f"{PROJECT_PATH_ENV}/{CODE_PATH}" -# DATA_PATH_ENV = f"{PROJECT_PATH_ENV}/{DATA_PATH}" -# NOTEBOOKS_PATH_ENV = f"{PROJECT_PATH_ENV}/{NOTEBOOKS_PATH}" -# REQUIREMENTS_PATH_ENV = f"{PROJECT_PATH_ENV}/{REQUIREMENTS_PATH}" -# RESULTS_PATH_ENV = f"{PROJECT_PATH_ENV}/{RESULTS_PATH}" - ##### SETUP ##### def setup(cfg: Config): - run(f"neuro kill {SETUP_NAME}") + run(f"neuro kill {cfg.SETUP_NAME}") cmd = ( - f"neuro run --name {SETUP_NAME} --preset cpu-small --detach " - f"--volume {storage_project.root}:{container_project.root}:ro " - f"{BASE_ENV_NAME} 'tail -f /dev/null'" # TODO: must be 'sleep 1h' + f"neuro run --name {cfg.SETUP_NAME} --preset cpu-small --detach " + f"--volume {cfg.storage.root}:{cfg.container.root}:ro " + f"{cfg.BASE_ENV_NAME} 'tail -f /dev/null'" # TODO: must be 'sleep 1h' ) run(cmd) - run(f"neuro cp -r {REQUIREMENTS_PATH} 
{REQUIREMENTS_PATH_STORAGE}") + run(f"neuro cp -r {cfg.local.requirements} {cfg.storage.requirements}") + # TODO: see below # For some reason the second command fail - # neuro exec {SETUP_NAME} 'apt-get update' - # neuro exec {SETUP_NAME} 'cat {REQUIREMENTS_PATH_ENV}/apt.txt | xargs apt-get install -y' - run(f"neuro exec {SETUP_NAME} 'pip install -r {REQUIREMENTS_PATH_ENV}/pip.txt'") - run(f"neuro job save {SETUP_NAME} {CUSTOM_ENV_NAME}") - run(f"neuro kill {SETUP_NAME}") + # neuro exec {cfg.SETUP_NAME} 'apt-get update' + # neuro exec {cfg.SETUP_NAME} 'cat {REQUIREMENTS_PATH_ENV}/apt.txt | xargs apt-get install -y' + run( + f"neuro exec {cfg.SETUP_NAME} 'pip install -r {cfg.container.requirements}/pip.txt'" + ) + run(f"neuro job save {cfg.SETUP_NAME} {cfg.CUSTOM_ENV_NAME}") + run(f"neuro kill {cfg.SETUP_NAME}") ##### STORAGE ##### -def upload_code(): - run(f"neuro cp -r -T {CODE_PATH} {CODE_PATH_STORAGE}") +def upload_code(cfg: Config) -> None: + run(f"neuro cp -r -T {cfg.local.code} {cfg.container.code}") -def clean_code(): - run(f"neuro rm -r {CODE_PATH_STORAGE}") +# TODO: redundant? clean where? locally? 
+def clean_code(cfg: Config) -> None: + # run(f"neuro rm -r {CODE_PATH_STORAGE}") + raise NotImplemented() -def upload_data(): - run(f"neuro storage load -p -u -T {DATA_PATH} {DATA_PATH_STORAGE}") +def upload_data(cfg: Config) -> None: + # run(f"neuro storage load -p -u -T {DATA_PATH} {DATA_PATH_STORAGE}") + raise NotImplemented() -def clean_data(): - run(f"neuro rm -r {DATA_PATH_STORAGE}") +def clean_data(cfg: Config) -> None: + # run(f"neuro rm -r {DATA_PATH_STORAGE}") + raise NotImplemented() -def upload_notebooks(): - run(f"neuro cp -r -T {NOTEBOOKS_PATH} {NOTEBOOKS_PATH_STORAGE}") +def upload_notebooks(cfg: Config) -> None: + # run(f"neuro cp -r -T {NOTEBOOKS_PATH} {NOTEBOOKS_PATH_STORAGE}") + raise NotImplemented() -def download_notebooks(): - run(f"neuro cp -r {NOTEBOOKS_PATH_STORAGE} {NOTEBOOKS_PATH}") +def download_notebooks(cfg: Config) -> None: + # run(f"neuro cp -r {NOTEBOOKS_PATH_STORAGE} {NOTEBOOKS_PATH}") + raise NotImplemented() -def clean_notebooks(): - run(f"neuro rm -r {NOTEBOOKS_PATH_STORAGE}") +def clean_notebooks(cfg: Config) -> None: + # run(f"neuro rm -r {NOTEBOOKS_PATH_STORAGE}") + raise NotImplemented() -def upload(): - upload_code() - upload_data() - upload_notebooks() +def upload(cfg: Config) -> None: + # upload_code() + # upload_data() + # upload_notebooks() + raise NotImplemented() -def clean(): - clean_code() - clean_data() - clean_notebooks() +def clean(cfg: Config) -> None: + # clean_code() + # clean_data() + # clean_notebooks() + raise NotImplemented() ##### JOBS ##### -def run_training(): - cmd = ( - f"python {CODE_PATH_ENV}/train.py --log_dir " - f"{RESULTS_PATH_ENV} --data_root {DATA_PATH_ENV}/cifar10" - ) - run( - f"neuro run --name {TRAINING_NAME} --preset gpu-small " - f"--volume {DATA_PATH_STORAGE}:{DATA_PATH_ENV}:ro " - f"--volume {CODE_PATH_STORAGE}:{CODE_PATH_ENV}:ro " - f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:rw " - f"{CUSTOM_ENV_NAME} " - f"'{cmd}'" - ) - - -def kill_training(): - run(f"neuro kill 
{TRAINING_NAME}") - - -def connect_training(): - run(f"neuro exec {TRAINING_NAME} bash") - - -def run_jupyter(): - cmd = ( - f"jupyter notebook --no-browser --ip=0.0.0.0 --allow-root " - f"--NotebookApp.token= --notebook-dir={NOTEBOOKS_PATH_ENV}" - ) - run( - f"neuro run " - f"--name {JUPYTER_NAME} " - f"--preset gpu-small " - f"--http 8888 --no-http-auth --detach " - f"--volume {DATA_PATH_STORAGE}:{DATA_PATH_ENV}:ro " - f"--volume {CODE_PATH_STORAGE}:{CODE_PATH_ENV}:rw " - f"--volume {NOTEBOOKS_PATH_STORAGE}:{NOTEBOOKS_PATH_ENV}:rw " - f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:rw " - f"{CUSTOM_ENV_NAME} " - f"'{cmd}'" - ) - run(f"neuro job browse {JUPYTER_NAME}") - - -def kill_jupyter(): - run(f"neuro kill {JUPYTER_NAME}") - - -def run_tensorboard(): - cmd = f"tensorboard --logdir={RESULTS_PATH_ENV}" - run( - f"neuro run " - f"--name {TENSORBOARD_NAME} " - f"--preset cpu-small " - f"--http 6006 --no-http-auth --detach " - f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:ro " - f"{CUSTOM_ENV_NAME} " - f"'{cmd}'" - ) - run(f"neuro job browse {TENSORBOARD_NAME}") - - -def kill_tensorboard(): - run(f"neuro kill {TENSORBOARD_NAME}") - - -def run_filebrowser(): - run( - f"neuro run " - f"--name {FILEBROWSER_NAME} " - f"--preset cpu-small " - f"--http 80 --no-http-auth --detach " - f"--volume {PROJECT_PATH_STORAGE}:/srv:rw " - f"filebrowser/filebrowser" - ) - run(f"neuro job browse {FILEBROWSER_NAME}") - - -def kill_filebrowser(): - run(f"neuro kill {FILEBROWSER_NAME}") - - -def kill(): - kill_training() - kill_jupyter() - kill_tensorboard() - kill_filebrowser() +def run_training(cfg: Config) -> None: + # cmd = ( + # f"python {CODE_PATH_ENV}/train.py --log_dir " + # f"{RESULTS_PATH_ENV} --data_root {DATA_PATH_ENV}/cifar10" + # ) + # run( + # f"neuro run --name {cfg.TRAINING_NAME} --preset gpu-small " + # f"--volume {DATA_PATH_STORAGE}:{DATA_PATH_ENV}:ro " + # f"--volume {CODE_PATH_STORAGE}:{CODE_PATH_ENV}:ro " + # f"--volume 
{RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:rw " + # f"{cfg.CUSTOM_ENV_NAME} " + # f"'{cmd}'" + # ) + raise NotImplemented() + + +def kill_training(cfg: Config) -> None: + # run(f"neuro kill {cfg.TRAINING_NAME}") + raise NotImplemented() + + +def connect_training(cfg: Config) -> None: + # run(f"neuro exec {cfg.TRAINING_NAME} bash") + raise NotImplemented() + + +def run_jupyter(cfg: Config) -> None: + # cmd = ( + # f"jupyter notebook --no-browser --ip=0.0.0.0 --allow-root " + # f"--NotebookApp.token= --notebook-dir={NOTEBOOKS_PATH_ENV}" + # ) + # run( + # f"neuro run " + # f"--name {cfg.JUPYTER_NAME} " + # f"--preset gpu-small " + # f"--http 8888 --no-http-auth --detach " + # f"--volume {DATA_PATH_STORAGE}:{DATA_PATH_ENV}:ro " + # f"--volume {CODE_PATH_STORAGE}:{CODE_PATH_ENV}:rw " + # f"--volume {NOTEBOOKS_PATH_STORAGE}:{NOTEBOOKS_PATH_ENV}:rw " + # f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:rw " + # f"{cfg.CUSTOM_ENV_NAME} " + # f"'{cmd}'" + # ) + # run(f"neuro job browse {cfg.JUPYTER_NAME}") + raise NotImplemented() + + +def kill_jupyter(cfg: Config) -> None: + # run(f"neuro kill {cfg.JUPYTER_NAME}") + raise NotImplemented() + + +def run_tensorboard(cfg: Config) -> None: + # cmd = f"tensorboard --logdir={RESULTS_PATH_ENV}" + # run( + # f"neuro run " + # f"--name {cfg.TENSORBOARD_NAME} " + # f"--preset cpu-small " + # f"--http 6006 --no-http-auth --detach " + # f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:ro " + # f"{cfg.CUSTOM_ENV_NAME} " + # f"'{cmd}'" + # ) + # run(f"neuro job browse {cfg.TENSORBOARD_NAME}") + raise NotImplemented() + + +def kill_tensorboard(cfg: Config) -> None: + # run(f"neuro kill {cfg.TENSORBOARD_NAME}") + raise NotImplemented() + + +def run_filebrowser(cfg: Config) -> None: + # run( + # f"neuro run " + # f"--name {cfg.FILEBROWSER_NAME} " + # f"--preset cpu-small " + # f"--http 80 --no-http-auth --detach " + # f"--volume {PROJECT_PATH_STORAGE}:/srv:rw " + # f"filebrowser/filebrowser" + # ) + # run(f"neuro job browse 
{cfg.FILEBROWSER_NAME}") + raise NotImplemented() + + +def kill_filebrowser(cfg: Config) -> None: + # run(f"neuro kill {cfg.FILEBROWSER_NAME}") + raise NotImplemented() + + +def kill(cfg: Config) -> None: + # kill_training() + # kill_jupyter() + # kill_tensorboard() + # kill_filebrowser() + raise NotImplemented() ##### LOCAL ##### -def setup_local(): - run("pip install -r requirements/pip.txt") +def setup_local(cfg: Config) -> None: + # run("pip install -r requirements/pip.txt") + raise NotImplemented() -def lint(): - run("flake8 .") - run("mypy .") +def lint(cfg: Config) -> None: + # run("flake8 .") + # run("mypy .") + raise NotImplemented() -def install(): - run("python setup.py install --user") +def install(cfg: Config) -> None: + # run("python setup.py install --user") + raise NotImplemented() ##### MISC ##### -def ps(): - run(f"neuro ps") +def ps(cfg: Config) -> None: + # run(f"neuro ps") + raise NotImplemented() if __name__ == "__main__": diff --git a/neuro-shortcuts/requirements-test.txt b/neuro-shortcuts/requirements-dev.txt similarity index 84% rename from neuro-shortcuts/requirements-test.txt rename to neuro-shortcuts/requirements-dev.txt index a8e0eb2c..7d613ea3 100644 --- a/neuro-shortcuts/requirements-test.txt +++ b/neuro-shortcuts/requirements-dev.txt @@ -1,4 +1,5 @@ #neuromation==19.9.2 +mypy==0.711 flake8==3.7.8 isort==4.3.21 black==19.3b0 From 46592c4b5cdc47657d23e477e8ab40b7ca23c5fa Mon Sep 17 00:00:00 2001 From: Artem Yushkovskiy Date: Fri, 6 Sep 2019 13:04:46 +0300 Subject: [PATCH 06/15] current --- neuro-shortcuts/.gitignore | 13 +- neuro-shortcuts/neuro_shortcuts/.gitignore | 10 + .../{ => neuro_shortcuts}/Makefile | 0 neuro-shortcuts/neuro_shortcuts/__init__.py | 0 .../neuro_shortcuts/_internals/__init__.py | 0 .../neuro_shortcuts/_internals/abc.py | 68 +++++ .../neuro_shortcuts/_internals/collector.py | 11 + .../{utils.py => _internals/runner.py} | 32 +- neuro-shortcuts/neuro_shortcuts/config.py | 7 + neuro-shortcuts/neuro_shortcuts/neu.py | 
276 +----------------- .../neuro_shortcuts/requirements-dev.txt | 5 + .../{ => neuro_shortcuts}/setup.py | 3 +- neuro-shortcuts/neuro_shortcuts/shortcuts.py | 209 +++++++++++++ 13 files changed, 324 insertions(+), 310 deletions(-) create mode 100644 neuro-shortcuts/neuro_shortcuts/.gitignore rename neuro-shortcuts/{ => neuro_shortcuts}/Makefile (100%) create mode 100644 neuro-shortcuts/neuro_shortcuts/__init__.py create mode 100644 neuro-shortcuts/neuro_shortcuts/_internals/__init__.py create mode 100644 neuro-shortcuts/neuro_shortcuts/_internals/abc.py create mode 100644 neuro-shortcuts/neuro_shortcuts/_internals/collector.py rename neuro-shortcuts/neuro_shortcuts/{utils.py => _internals/runner.py} (89%) create mode 100644 neuro-shortcuts/neuro_shortcuts/config.py create mode 100644 neuro-shortcuts/neuro_shortcuts/requirements-dev.txt rename neuro-shortcuts/{ => neuro_shortcuts}/setup.py (69%) create mode 100644 neuro-shortcuts/neuro_shortcuts/shortcuts.py diff --git a/neuro-shortcuts/.gitignore b/neuro-shortcuts/.gitignore index 59f4055b..24224572 100644 --- a/neuro-shortcuts/.gitignore +++ b/neuro-shortcuts/.gitignore @@ -1,11 +1,2 @@ -/.env/ -/venv/ -pip-wheel-metadata -.pytest_cache -*.swo -*.egg-info -**/__pycache__ -.eggs -.mypy_cache -.tmontmp -.testmondata +.mypy_cache/ + diff --git a/neuro-shortcuts/neuro_shortcuts/.gitignore b/neuro-shortcuts/neuro_shortcuts/.gitignore new file mode 100644 index 00000000..a784a902 --- /dev/null +++ b/neuro-shortcuts/neuro_shortcuts/.gitignore @@ -0,0 +1,10 @@ +/.env/ +/venv/ +/build/ +pip-wheel-metadata +*.swo +*.egg-info +**/__pycache__ +.eggs +.tmontmp +.testmondata diff --git a/neuro-shortcuts/Makefile b/neuro-shortcuts/neuro_shortcuts/Makefile similarity index 100% rename from neuro-shortcuts/Makefile rename to neuro-shortcuts/neuro_shortcuts/Makefile diff --git a/neuro-shortcuts/neuro_shortcuts/__init__.py b/neuro-shortcuts/neuro_shortcuts/__init__.py new file mode 100644 index 00000000..e69de29b diff --git 
a/neuro-shortcuts/neuro_shortcuts/_internals/__init__.py b/neuro-shortcuts/neuro_shortcuts/_internals/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/neuro-shortcuts/neuro_shortcuts/_internals/abc.py b/neuro-shortcuts/neuro_shortcuts/_internals/abc.py new file mode 100644 index 00000000..0fa6ca7e --- /dev/null +++ b/neuro-shortcuts/neuro_shortcuts/_internals/abc.py @@ -0,0 +1,68 @@ +import abc +from dataclasses import dataclass + + +class Project(abc.ABC): + @abc.abstractmethod + def root(self) -> str: + pass + + @property + def data(self) -> str: + return f"{self.root}/data" + + @property + def code(self) -> str: + return f"{self.root}/code" + + @property + def notebooks(self) -> str: + return f"{self.root}/notebooks" + + @property + def requirements(self) -> str: + return f"{self.root}/requirements" + + @property + def results(self) -> str: + return f"{self.root}/results" + + +class StorageProject(Project): + def __init__(self, project_name: str) -> None: + self._project_name = project_name + + def root(self) -> str: + return f"storage:{self._project_name}" + + +class LocalProject(Project): + def __init__(self): + # TODO (artem) remember `pwd` as `self._project_path` + pass + + def root(self) -> str: + # TODO: return self._project_path + raise NotImplemented() + + +class ContainerProject(Project): + def root(self) -> str: + # TODO: always in the root? 
+ return "/project" + + +@dataclass +class Config: + local: LocalProject + storage: LocalProject + container: LocalProject + + # TODO + SETUP_NAME = "setup" + TRAINING_NAME = "training" + JUPYTER_NAME = "jupyter" + TENSORBOARD_NAME = "tensorboard" + FILEBROWSER_NAME = "filebrowser" + BASE_ENV_NAME = "image:neuro/base" + CUSTOM_ENV_NAME = "image:neuro/custom" diff --git a/neuro-shortcuts/neuro_shortcuts/_internals/collector.py b/neuro-shortcuts/neuro_shortcuts/_internals/collector.py new file mode 100644 index 00000000..c68602db --- /dev/null +++ b/neuro-shortcuts/neuro_shortcuts/_internals/collector.py @@ -0,0 +1,11 @@ +import sys +import typing as t + +from neuro_shortcuts._internals.abc import Config + + +def collect_entrypoints(module_name: str) -> t.Dict[str, t.Callable[[Config], None]]: + f_names = dir(module_name) + print(f_names) + exit(33) + return {f_name: getattr(sys.modules[module_name], f_name) for f_name in f_names} diff --git a/neuro-shortcuts/neuro_shortcuts/utils.py b/neuro-shortcuts/neuro_shortcuts/_internals/runner.py similarity index 89% rename from neuro-shortcuts/neuro_shortcuts/utils.py rename to neuro-shortcuts/neuro_shortcuts/_internals/runner.py index 47595aaa..d962502e 100644 --- a/neuro-shortcuts/neuro_shortcuts/utils.py +++ b/neuro-shortcuts/neuro_shortcuts/_internals/runner.py @@ -1,4 +1,3 @@ -import inspect import logging import re import shlex @@ -9,23 +8,11 @@ from contextlib import contextmanager from pathlib import Path from time import sleep -from uuid import uuid4 OUT_DIRECTORY_NAME = "out" SUBMITTED_JOBS_FILE_NAME = "submitted_jobs.txt" -DEFAULT_TIMEOUT = 5 * 60 - -SysCap = namedtuple("SysCap", "out err") - - -job_id_pattern = re.compile( - # pattern for UUID v4 taken here: https://stackoverflow.com/a/38191078 - r"(job-[0-9a-f]{8}-[0-9a-f]{4}-[4][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12})", - re.IGNORECASE, -) - def get_submitted_jobs_file() -> Path: project_root = Path(__file__).resolve().parent @@ -35,21 +22,18 @@ def 
get_submitted_jobs_file() -> Path: SUBMITTED_JOBS_FILE = get_submitted_jobs_file() -log = logging.getLogger(__name__) - - -def split_command(cmd: str) -> t.List[str]: - return shlex.split(cmd) +DEFAULT_TIMEOUT = 5 * 60 -def random_str(length: int) -> str: - assert 0 <= length <= 32, length - return uuid4().hex[:length] +SysCap = namedtuple("SysCap", "out err") +log = logging.getLogger(__name__) -def generate_job_name() -> str: - postfix = f"-{random_str(4)}" - return inspect.stack()[1].function.replace("_", "-") + postfix +job_id_pattern = re.compile( + # pattern for UUID v4 taken here: https://stackoverflow.com/a/38191078 + r"(job-[0-9a-f]{8}-[0-9a-f]{4}-[4][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12})", + re.IGNORECASE, +) @contextmanager diff --git a/neuro-shortcuts/neuro_shortcuts/config.py b/neuro-shortcuts/neuro_shortcuts/config.py new file mode 100644 index 00000000..a58e51b0 --- /dev/null +++ b/neuro-shortcuts/neuro_shortcuts/config.py @@ -0,0 +1,7 @@ +SETUP_NAME = "setup" +TRAINING_NAME = "training" +JUPYTER_NAME = "jupyter" +TENSORBOARD_NAME = "tensorboard" +FILEBROWSER_NAME = "filebrowser" +BASE_ENV_NAME = "image:neuro/base" +CUSTOM_ENV_NAME = "image:neuro/custom" diff --git a/neuro-shortcuts/neuro_shortcuts/neu.py b/neuro-shortcuts/neuro_shortcuts/neu.py index c37b53c7..3a40b901 100644 --- a/neuro-shortcuts/neuro_shortcuts/neu.py +++ b/neuro-shortcuts/neuro_shortcuts/neu.py @@ -1,276 +1,4 @@ -import abc -from dataclasses import dataclass - -from .utils import run - - -class Project(abc.ABC): - @abc.abstractmethod - def root(self) -> str: - pass - - @property - def data(self) -> str: - return f"{self.root}/data" - - @property - def code(self) -> str: - return f"{self.root}/code" - - @property - def notebooks(self) -> str: - return f"{self.root}/notebooks" - - @property - def requirements(self) -> str: - return f"{self.root}/requirements" - - @property - def results(self) -> str: - return f"{self.root}/results" - - -class StorageProject(Project): - def 
__init__(self, project_name: str) -> None: - self._project_name = project_name - - def root(self) -> str: - return f"storage:{self._project_name}" - - -class LocalProject(Project): - def __init__(self): - # TODO (artem) remember `pwd` as `self._project_path` - pass - - def root(self) -> str: - # TODO: return self._project_path - raise NotImplemented() - - -class ContainerProject(Project): - def root(self) -> str: - # TODO: always in the root? - return "/project" - - -@dataclass -class Config: - local: LocalProject - storage: LocalProject - container: LocalProject - - SETUP_NAME = "setup" - TRAINING_NAME = "training" - JUPYTER_NAME = "jupyter" - TENSORBOARD_NAME = "tensorboard" - FILEBROWSER_NAME = "filebrowser" - BASE_ENV_NAME = "image:neuro/base" - CUSTOM_ENV_NAME = "image:neuro/custom" - - -##### SETUP ##### - - -def setup(cfg: Config): - run(f"neuro kill {cfg.SETUP_NAME}") - cmd = ( - f"neuro run --name {cfg.SETUP_NAME} --preset cpu-small --detach " - f"--volume {cfg.storage.root}:{cfg.container.root}:ro " - f"{cfg.BASE_ENV_NAME} 'tail -f /dev/null'" # TODO: must be 'sleep 1h' - ) - run(cmd) - run(f"neuro cp -r {cfg.local.requirements} {cfg.storage.requirements}") - # TODO: see below - # For some reason the second command fail - # neuro exec {cfg.SETUP_NAME} 'apt-get update' - # neuro exec {cfg.SETUP_NAME} 'cat {REQUIREMENTS_PATH_ENV}/apt.txt | xargs apt-get install -y' - run( - f"neuro exec {cfg.SETUP_NAME} 'pip install -r {cfg.container.requirements}/pip.txt'" - ) - run(f"neuro job save {cfg.SETUP_NAME} {cfg.CUSTOM_ENV_NAME}") - run(f"neuro kill {cfg.SETUP_NAME}") - - -##### STORAGE ##### - - -def upload_code(cfg: Config) -> None: - run(f"neuro cp -r -T {cfg.local.code} {cfg.container.code}") - - -# TODO: redundant? clean where? locally? 
-def clean_code(cfg: Config) -> None: - # run(f"neuro rm -r {CODE_PATH_STORAGE}") - raise NotImplemented() - - -def upload_data(cfg: Config) -> None: - # run(f"neuro storage load -p -u -T {DATA_PATH} {DATA_PATH_STORAGE}") - raise NotImplemented() - - -def clean_data(cfg: Config) -> None: - # run(f"neuro rm -r {DATA_PATH_STORAGE}") - raise NotImplemented() - - -def upload_notebooks(cfg: Config) -> None: - # run(f"neuro cp -r -T {NOTEBOOKS_PATH} {NOTEBOOKS_PATH_STORAGE}") - raise NotImplemented() - - -def download_notebooks(cfg: Config) -> None: - # run(f"neuro cp -r {NOTEBOOKS_PATH_STORAGE} {NOTEBOOKS_PATH}") - raise NotImplemented() - - -def clean_notebooks(cfg: Config) -> None: - # run(f"neuro rm -r {NOTEBOOKS_PATH_STORAGE}") - raise NotImplemented() - - -def upload(cfg: Config) -> None: - # upload_code() - # upload_data() - # upload_notebooks() - raise NotImplemented() - - -def clean(cfg: Config) -> None: - # clean_code() - # clean_data() - # clean_notebooks() - raise NotImplemented() - - -##### JOBS ##### - - -def run_training(cfg: Config) -> None: - # cmd = ( - # f"python {CODE_PATH_ENV}/train.py --log_dir " - # f"{RESULTS_PATH_ENV} --data_root {DATA_PATH_ENV}/cifar10" - # ) - # run( - # f"neuro run --name {cfg.TRAINING_NAME} --preset gpu-small " - # f"--volume {DATA_PATH_STORAGE}:{DATA_PATH_ENV}:ro " - # f"--volume {CODE_PATH_STORAGE}:{CODE_PATH_ENV}:ro " - # f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:rw " - # f"{cfg.CUSTOM_ENV_NAME} " - # f"'{cmd}'" - # ) - raise NotImplemented() - - -def kill_training(cfg: Config) -> None: - # run(f"neuro kill {cfg.TRAINING_NAME}") - raise NotImplemented() - - -def connect_training(cfg: Config) -> None: - # run(f"neuro exec {cfg.TRAINING_NAME} bash") - raise NotImplemented() - - -def run_jupyter(cfg: Config) -> None: - # cmd = ( - # f"jupyter notebook --no-browser --ip=0.0.0.0 --allow-root " - # f"--NotebookApp.token= --notebook-dir={NOTEBOOKS_PATH_ENV}" - # ) - # run( - # f"neuro run " - # f"--name 
{cfg.JUPYTER_NAME} " - # f"--preset gpu-small " - # f"--http 8888 --no-http-auth --detach " - # f"--volume {DATA_PATH_STORAGE}:{DATA_PATH_ENV}:ro " - # f"--volume {CODE_PATH_STORAGE}:{CODE_PATH_ENV}:rw " - # f"--volume {NOTEBOOKS_PATH_STORAGE}:{NOTEBOOKS_PATH_ENV}:rw " - # f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:rw " - # f"{cfg.CUSTOM_ENV_NAME} " - # f"'{cmd}'" - # ) - # run(f"neuro job browse {cfg.JUPYTER_NAME}") - raise NotImplemented() - - -def kill_jupyter(cfg: Config) -> None: - # run(f"neuro kill {cfg.JUPYTER_NAME}") - raise NotImplemented() - - -def run_tensorboard(cfg: Config) -> None: - # cmd = f"tensorboard --logdir={RESULTS_PATH_ENV}" - # run( - # f"neuro run " - # f"--name {cfg.TENSORBOARD_NAME} " - # f"--preset cpu-small " - # f"--http 6006 --no-http-auth --detach " - # f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:ro " - # f"{cfg.CUSTOM_ENV_NAME} " - # f"'{cmd}'" - # ) - # run(f"neuro job browse {cfg.TENSORBOARD_NAME}") - raise NotImplemented() - - -def kill_tensorboard(cfg: Config) -> None: - # run(f"neuro kill {cfg.TENSORBOARD_NAME}") - raise NotImplemented() - - -def run_filebrowser(cfg: Config) -> None: - # run( - # f"neuro run " - # f"--name {cfg.FILEBROWSER_NAME} " - # f"--preset cpu-small " - # f"--http 80 --no-http-auth --detach " - # f"--volume {PROJECT_PATH_STORAGE}:/srv:rw " - # f"filebrowser/filebrowser" - # ) - # run(f"neuro job browse {cfg.FILEBROWSER_NAME}") - raise NotImplemented() - - -def kill_filebrowser(cfg: Config) -> None: - # run(f"neuro kill {cfg.FILEBROWSER_NAME}") - raise NotImplemented() - - -def kill(cfg: Config) -> None: - # kill_training() - # kill_jupyter() - # kill_tensorboard() - # kill_filebrowser() - raise NotImplemented() - - -##### LOCAL ##### - - -def setup_local(cfg: Config) -> None: - # run("pip install -r requirements/pip.txt") - raise NotImplemented() - - -def lint(cfg: Config) -> None: - # run("flake8 .") - # run("mypy .") - raise NotImplemented() - - -def install(cfg: Config) -> None: - # 
run("python setup.py install --user") - raise NotImplemented() - - -##### MISC ##### - - -def ps(cfg: Config) -> None: - # run(f"neuro ps") - raise NotImplemented() - +from ._internals.collector import collect_entrypoints if __name__ == "__main__": - setup() + entrypoints = collect_entrypoints("shortcuts") diff --git a/neuro-shortcuts/neuro_shortcuts/requirements-dev.txt b/neuro-shortcuts/neuro_shortcuts/requirements-dev.txt new file mode 100644 index 00000000..7d613ea3 --- /dev/null +++ b/neuro-shortcuts/neuro_shortcuts/requirements-dev.txt @@ -0,0 +1,5 @@ +#neuromation==19.9.2 +mypy==0.711 +flake8==3.7.8 +isort==4.3.21 +black==19.3b0 diff --git a/neuro-shortcuts/setup.py b/neuro-shortcuts/neuro_shortcuts/setup.py similarity index 69% rename from neuro-shortcuts/setup.py rename to neuro-shortcuts/neuro_shortcuts/setup.py index 22cc34ba..d19366f8 100644 --- a/neuro-shortcuts/setup.py +++ b/neuro-shortcuts/neuro_shortcuts/setup.py @@ -7,5 +7,6 @@ packages=find_packages(), python_requires=">=3.7.0", install_requires=(), - entry_points={"pytest11": []}, + # TODO: console_scripts for running from python console + scripts=["neu.py"], ) diff --git a/neuro-shortcuts/neuro_shortcuts/shortcuts.py b/neuro-shortcuts/neuro_shortcuts/shortcuts.py new file mode 100644 index 00000000..ca1512c1 --- /dev/null +++ b/neuro-shortcuts/neuro_shortcuts/shortcuts.py @@ -0,0 +1,209 @@ +from ._internals.abc import Config +from ._internals.runner import run + + +##### SETUP ##### + + +def setup(cfg: Config): + run(f"neuro kill {cfg.SETUP_NAME}") + cmd = ( + f"neuro run --name {cfg.SETUP_NAME} --preset cpu-small --detach " + f"--volume {cfg.storage.root}:{cfg.container.root}:ro " + f"{cfg.BASE_ENV_NAME} 'tail -f /dev/null'" # TODO: must be 'sleep 1h' + ) + run(cmd) + run(f"neuro cp -r {cfg.local.requirements} {cfg.storage.requirements}") + # TODO: see below + # For some reason the second command fail + # neuro exec {cfg.SETUP_NAME} 'apt-get update' + # neuro exec {cfg.SETUP_NAME} 'cat 
{REQUIREMENTS_PATH_ENV}/apt.txt | xargs apt-get install -y' + run( + f"neuro exec {cfg.SETUP_NAME} 'pip install -r {cfg.container.requirements}/pip.txt'" + ) + run(f"neuro job save {cfg.SETUP_NAME} {cfg.CUSTOM_ENV_NAME}") + run(f"neuro kill {cfg.SETUP_NAME}") + + +##### STORAGE ##### + + +def upload_code(cfg: Config) -> None: + run(f"neuro cp -r -T {cfg.local.code} {cfg.container.code}") + + +# TODO: redundant? clean where? locally? +def clean_code(cfg: Config) -> None: + # run(f"neuro rm -r {CODE_PATH_STORAGE}") + raise NotImplemented() + + +def upload_data(cfg: Config) -> None: + # run(f"neuro storage load -p -u -T {DATA_PATH} {DATA_PATH_STORAGE}") + raise NotImplemented() + + +def clean_data(cfg: Config) -> None: + # run(f"neuro rm -r {DATA_PATH_STORAGE}") + raise NotImplemented() + + +def upload_notebooks(cfg: Config) -> None: + # run(f"neuro cp -r -T {NOTEBOOKS_PATH} {NOTEBOOKS_PATH_STORAGE}") + raise NotImplemented() + + +def download_notebooks(cfg: Config) -> None: + # run(f"neuro cp -r {NOTEBOOKS_PATH_STORAGE} {NOTEBOOKS_PATH}") + raise NotImplemented() + + +def clean_notebooks(cfg: Config) -> None: + # run(f"neuro rm -r {NOTEBOOKS_PATH_STORAGE}") + raise NotImplemented() + + +def upload(cfg: Config) -> None: + # upload_code() + # upload_data() + # upload_notebooks() + raise NotImplemented() + + +def clean(cfg: Config) -> None: + # clean_code() + # clean_data() + # clean_notebooks() + raise NotImplemented() + + +##### JOBS ##### + + +def run_training(cfg: Config) -> None: + # cmd = ( + # f"python {CODE_PATH_ENV}/train.py --log_dir " + # f"{RESULTS_PATH_ENV} --data_root {DATA_PATH_ENV}/cifar10" + # ) + # run( + # f"neuro run --name {cfg.TRAINING_NAME} --preset gpu-small " + # f"--volume {DATA_PATH_STORAGE}:{DATA_PATH_ENV}:ro " + # f"--volume {CODE_PATH_STORAGE}:{CODE_PATH_ENV}:ro " + # f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:rw " + # f"{cfg.CUSTOM_ENV_NAME} " + # f"'{cmd}'" + # ) + raise NotImplemented() + + +def kill_training(cfg: Config) -> 
None: + # run(f"neuro kill {cfg.TRAINING_NAME}") + raise NotImplemented() + + +def connect_training(cfg: Config) -> None: + # run(f"neuro exec {cfg.TRAINING_NAME} bash") + raise NotImplemented() + + +def run_jupyter(cfg: Config) -> None: + # cmd = ( + # f"jupyter notebook --no-browser --ip=0.0.0.0 --allow-root " + # f"--NotebookApp.token= --notebook-dir={NOTEBOOKS_PATH_ENV}" + # ) + # run( + # f"neuro run " + # f"--name {cfg.JUPYTER_NAME} " + # f"--preset gpu-small " + # f"--http 8888 --no-http-auth --detach " + # f"--volume {DATA_PATH_STORAGE}:{DATA_PATH_ENV}:ro " + # f"--volume {CODE_PATH_STORAGE}:{CODE_PATH_ENV}:rw " + # f"--volume {NOTEBOOKS_PATH_STORAGE}:{NOTEBOOKS_PATH_ENV}:rw " + # f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:rw " + # f"{cfg.CUSTOM_ENV_NAME} " + # f"'{cmd}'" + # ) + # run(f"neuro job browse {cfg.JUPYTER_NAME}") + raise NotImplemented() + + +def kill_jupyter(cfg: Config) -> None: + # run(f"neuro kill {cfg.JUPYTER_NAME}") + raise NotImplemented() + + +def run_tensorboard(cfg: Config) -> None: + # cmd = f"tensorboard --logdir={RESULTS_PATH_ENV}" + # run( + # f"neuro run " + # f"--name {cfg.TENSORBOARD_NAME} " + # f"--preset cpu-small " + # f"--http 6006 --no-http-auth --detach " + # f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:ro " + # f"{cfg.CUSTOM_ENV_NAME} " + # f"'{cmd}'" + # ) + # run(f"neuro job browse {cfg.TENSORBOARD_NAME}") + raise NotImplemented() + + +def kill_tensorboard(cfg: Config) -> None: + # run(f"neuro kill {cfg.TENSORBOARD_NAME}") + raise NotImplemented() + + +def run_filebrowser(cfg: Config) -> None: + # run( + # f"neuro run " + # f"--name {cfg.FILEBROWSER_NAME} " + # f"--preset cpu-small " + # f"--http 80 --no-http-auth --detach " + # f"--volume {PROJECT_PATH_STORAGE}:/srv:rw " + # f"filebrowser/filebrowser" + # ) + # run(f"neuro job browse {cfg.FILEBROWSER_NAME}") + raise NotImplemented() + + +def kill_filebrowser(cfg: Config) -> None: + # run(f"neuro kill {cfg.FILEBROWSER_NAME}") + raise NotImplemented() + 
+ +def kill(cfg: Config) -> None: + # kill_training() + # kill_jupyter() + # kill_tensorboard() + # kill_filebrowser() + raise NotImplemented() + + +##### LOCAL ##### + + +def setup_local(cfg: Config) -> None: + # run("pip install -r requirements/pip.txt") + raise NotImplemented() + + +def lint(cfg: Config) -> None: + # run("flake8 .") + # run("mypy .") + raise NotImplemented() + + +def install(cfg: Config) -> None: + # run("python setup.py install --user") + raise NotImplemented() + + +##### MISC ##### + + +def ps(cfg: Config) -> None: + # run(f"neuro ps") + raise NotImplemented() + + +if __name__ == "__main__": + setup() From 6d2ea63c4b71fdc9efa0e1f151f38f411ae1b99a Mon Sep 17 00:00:00 2001 From: Artem Yushkovskiy Date: Fri, 6 Sep 2019 15:55:13 +0300 Subject: [PATCH 07/15] final: test --- neuro-shortcuts/.gitignore | 11 ++++- .../{neuro_shortcuts => }/Makefile | 0 neuro-shortcuts/neu.py | 42 +++++++++++++++++++ neuro-shortcuts/neuro_shortcuts/.gitignore | 10 ----- .../neuro_shortcuts/_internals/collector.py | 11 ----- neuro-shortcuts/neuro_shortcuts/config.py | 7 ---- neuro-shortcuts/neuro_shortcuts/neu.py | 4 -- .../neuro_shortcuts/requirements-dev.txt | 5 --- .../{neuro_shortcuts => }/setup.py | 0 .../__init__.py | 0 .../shortcuts.py => shortcuts/api.py} | 7 +++- neuro-shortcuts/shortcuts/config.py | 24 +++++++++++ .../internals}/__init__.py | 0 .../_internals => shortcuts/internals}/abc.py | 22 +++++----- .../shortcuts/internals/loaders.py | 20 +++++++++ .../internals/runners.py} | 0 16 files changed, 112 insertions(+), 51 deletions(-) rename neuro-shortcuts/{neuro_shortcuts => }/Makefile (100%) create mode 100644 neuro-shortcuts/neu.py delete mode 100644 neuro-shortcuts/neuro_shortcuts/.gitignore delete mode 100644 neuro-shortcuts/neuro_shortcuts/_internals/collector.py delete mode 100644 neuro-shortcuts/neuro_shortcuts/config.py delete mode 100644 neuro-shortcuts/neuro_shortcuts/neu.py delete mode 100644 neuro-shortcuts/neuro_shortcuts/requirements-dev.txt 
rename neuro-shortcuts/{neuro_shortcuts => }/setup.py (100%) rename neuro-shortcuts/{neuro_shortcuts => shortcuts}/__init__.py (100%) rename neuro-shortcuts/{neuro_shortcuts/shortcuts.py => shortcuts/api.py} (97%) create mode 100644 neuro-shortcuts/shortcuts/config.py rename neuro-shortcuts/{neuro_shortcuts/_internals => shortcuts/internals}/__init__.py (100%) rename neuro-shortcuts/{neuro_shortcuts/_internals => shortcuts/internals}/abc.py (79%) create mode 100644 neuro-shortcuts/shortcuts/internals/loaders.py rename neuro-shortcuts/{neuro_shortcuts/_internals/runner.py => shortcuts/internals/runners.py} (100%) diff --git a/neuro-shortcuts/.gitignore b/neuro-shortcuts/.gitignore index 24224572..49a35173 100644 --- a/neuro-shortcuts/.gitignore +++ b/neuro-shortcuts/.gitignore @@ -1,2 +1,11 @@ +/.env/ +/venv/ +/build/ .mypy_cache/ - +pip-wheel-metadata +*.swo +*.egg-info +**/__pycache__ +.eggs +.tmontmp +.testmondata diff --git a/neuro-shortcuts/neuro_shortcuts/Makefile b/neuro-shortcuts/Makefile similarity index 100% rename from neuro-shortcuts/neuro_shortcuts/Makefile rename to neuro-shortcuts/Makefile diff --git a/neuro-shortcuts/neu.py b/neuro-shortcuts/neu.py new file mode 100644 index 00000000..2ba2598f --- /dev/null +++ b/neuro-shortcuts/neu.py @@ -0,0 +1,42 @@ +import pathlib + +from shortcuts.config import create_config +from shortcuts.internals.loaders import collect_entrypoints +from argparse import ArgumentParser +API_MODULE_NAME = "shortcuts.api" + + +def create_parser() -> ArgumentParser: + parser = ArgumentParser(description='TODO root') + parser.add_argument('action', metavar='ACTION', type=str, + help='TODO action') + return parser + +def get_project_name() -> str: + """ We assume that the parent directory is named same as the project + """ + current_dir = pathlib.Path(__file__).resolve() + return str(current_dir.parent) + + +if __name__ == "__main__": + parser = create_parser() + args = parser.parse_args() + action_name = args.action + + 
module_name = API_MODULE_NAME + entrypoints = collect_entrypoints(module_name) + action = entrypoints.get(action_name) + if not action: + print(f"ERROR: Cannot find action '{action_name}' in module '{module_name}'") + print(f"Available actions: {', '.join(entrypoints.keys())}") + exit(1) + + project_name = get_project_name() + cfg = create_config(project_name) + + try: + action(cfg) + except (TypeError, AttributeError) as e: + print(f"ERROR: Could not execute action '{action_name}': {e}") + diff --git a/neuro-shortcuts/neuro_shortcuts/.gitignore b/neuro-shortcuts/neuro_shortcuts/.gitignore deleted file mode 100644 index a784a902..00000000 --- a/neuro-shortcuts/neuro_shortcuts/.gitignore +++ /dev/null @@ -1,10 +0,0 @@ -/.env/ -/venv/ -/build/ -pip-wheel-metadata -*.swo -*.egg-info -**/__pycache__ -.eggs -.tmontmp -.testmondata diff --git a/neuro-shortcuts/neuro_shortcuts/_internals/collector.py b/neuro-shortcuts/neuro_shortcuts/_internals/collector.py deleted file mode 100644 index c68602db..00000000 --- a/neuro-shortcuts/neuro_shortcuts/_internals/collector.py +++ /dev/null @@ -1,11 +0,0 @@ -import sys -import typing as t - -from neuro_shortcuts._internals.abc import Config - - -def collect_entrypoints(module_name: str) -> t.Dict[str, t.Callable[[Config], None]]: - f_names = dir(module_name) - print(f_names) - exit(33) - return {f_name: getattr(sys.modules[module_name], f_name) for f_name in f_names} diff --git a/neuro-shortcuts/neuro_shortcuts/config.py b/neuro-shortcuts/neuro_shortcuts/config.py deleted file mode 100644 index a58e51b0..00000000 --- a/neuro-shortcuts/neuro_shortcuts/config.py +++ /dev/null @@ -1,7 +0,0 @@ -SETUP_NAME = "setup" -TRAINING_NAME = "training" -JUPYTER_NAME = "jupyter" -TENSORBOARD_NAME = "tensorboard" -FILEBROWSER_NAME = "filebrowser" -BASE_ENV_NAME = "image:neuro/base" -CUSTOM_ENV_NAME = "image:neuro/custom" diff --git a/neuro-shortcuts/neuro_shortcuts/neu.py b/neuro-shortcuts/neuro_shortcuts/neu.py deleted file mode 100644 index 
3a40b901..00000000 --- a/neuro-shortcuts/neuro_shortcuts/neu.py +++ /dev/null @@ -1,4 +0,0 @@ -from ._internals.collector import collect_entrypoints - -if __name__ == "__main__": - entrypoints = collect_entrypoints("shortcuts") diff --git a/neuro-shortcuts/neuro_shortcuts/requirements-dev.txt b/neuro-shortcuts/neuro_shortcuts/requirements-dev.txt deleted file mode 100644 index 7d613ea3..00000000 --- a/neuro-shortcuts/neuro_shortcuts/requirements-dev.txt +++ /dev/null @@ -1,5 +0,0 @@ -#neuromation==19.9.2 -mypy==0.711 -flake8==3.7.8 -isort==4.3.21 -black==19.3b0 diff --git a/neuro-shortcuts/neuro_shortcuts/setup.py b/neuro-shortcuts/setup.py similarity index 100% rename from neuro-shortcuts/neuro_shortcuts/setup.py rename to neuro-shortcuts/setup.py diff --git a/neuro-shortcuts/neuro_shortcuts/__init__.py b/neuro-shortcuts/shortcuts/__init__.py similarity index 100% rename from neuro-shortcuts/neuro_shortcuts/__init__.py rename to neuro-shortcuts/shortcuts/__init__.py diff --git a/neuro-shortcuts/neuro_shortcuts/shortcuts.py b/neuro-shortcuts/shortcuts/api.py similarity index 97% rename from neuro-shortcuts/neuro_shortcuts/shortcuts.py rename to neuro-shortcuts/shortcuts/api.py index ca1512c1..a84fd6a8 100644 --- a/neuro-shortcuts/neuro_shortcuts/shortcuts.py +++ b/neuro-shortcuts/shortcuts/api.py @@ -1,5 +1,5 @@ -from ._internals.abc import Config -from ._internals.runner import run +from .internals.abc import Config +from .internals.runners import run ##### SETUP ##### @@ -25,6 +25,9 @@ def setup(cfg: Config): run(f"neuro kill {cfg.SETUP_NAME}") +def test(cfg: Config): + run("neuro ls") + ##### STORAGE ##### diff --git a/neuro-shortcuts/shortcuts/config.py b/neuro-shortcuts/shortcuts/config.py new file mode 100644 index 00000000..7766d62c --- /dev/null +++ b/neuro-shortcuts/shortcuts/config.py @@ -0,0 +1,24 @@ +from .internals.abc import Config, StorageProject, LocalProject, ContainerProject + +SETUP_NAME = "setup" +TRAINING_NAME = "training" +JUPYTER_NAME = 
"jupyter" +TENSORBOARD_NAME = "tensorboard" +FILEBROWSER_NAME = "filebrowser" +BASE_ENV_NAME = "image:neuro/base" +CUSTOM_ENV_NAME = "image:neuro/custom" + + +def create_config(project_name: str) -> Config: + return Config( + local=LocalProject(), + storage=StorageProject(project_name), + container=ContainerProject(), + SETUP_NAME=SETUP_NAME, + TRAINING_NAME=TRAINING_NAME, + JUPYTER_NAME=JUPYTER_NAME, + TENSORBOARD_NAME=TENSORBOARD_NAME, + FILEBROWSER_NAME=FILEBROWSER_NAME, + BASE_ENV_NAME=BASE_ENV_NAME, + CUSTOM_ENV_NAME=CUSTOM_ENV_NAME, + ) \ No newline at end of file diff --git a/neuro-shortcuts/neuro_shortcuts/_internals/__init__.py b/neuro-shortcuts/shortcuts/internals/__init__.py similarity index 100% rename from neuro-shortcuts/neuro_shortcuts/_internals/__init__.py rename to neuro-shortcuts/shortcuts/internals/__init__.py diff --git a/neuro-shortcuts/neuro_shortcuts/_internals/abc.py b/neuro-shortcuts/shortcuts/internals/abc.py similarity index 79% rename from neuro-shortcuts/neuro_shortcuts/_internals/abc.py rename to neuro-shortcuts/shortcuts/internals/abc.py index 0fa6ca7e..d5e962c2 100644 --- a/neuro-shortcuts/neuro_shortcuts/_internals/abc.py +++ b/neuro-shortcuts/shortcuts/internals/abc.py @@ -55,14 +55,14 @@ def root(self) -> str: @dataclass class Config: local: LocalProject - storage: LocalProject - container: LocalProject - - # TODO - SETUP_NAME = "setup" - TRAINING_NAME = "training" - JUPYTER_NAME = "jupyter" - TENSORBOARD_NAME = "tensorboard" - FILEBROWSER_NAME = "filebrowser" - BASE_ENV_NAME = "image:neuro/base" - CUSTOM_ENV_NAME = "image:neuro/custom" + storage: StorageProject + container: ContainerProject + + # TODO: cleanup + SETUP_NAME: str + TRAINING_NAME: str + JUPYTER_NAME: str + TENSORBOARD_NAME: str + FILEBROWSER_NAME: str + BASE_ENV_NAME: str + CUSTOM_ENV_NAME: str diff --git a/neuro-shortcuts/shortcuts/internals/loaders.py b/neuro-shortcuts/shortcuts/internals/loaders.py new file mode 100644 index 00000000..5e4e8a69 --- /dev/null +++ 
b/neuro-shortcuts/shortcuts/internals/loaders.py @@ -0,0 +1,20 @@ +import importlib +import sys +import typing as t + +from shortcuts.internals.abc import Config + + +def collect_entrypoints(module_name: str) -> t.Dict[str, t.Callable[[Config], None]]: +    """ collect all callable objects from module `module_name` +    whose names start with a lowercase english alphabet character +    """ +    importlib.import_module(module_name) +    module = sys.modules[module_name] +    result: t.Dict[str, t.Callable] = {} +    for name in dir(module): +        if 'a' <= name[0] <= 'z': +            obj = getattr(module, name) +            if callable(obj): +                result[name] = obj +    return result diff --git a/neuro-shortcuts/neuro_shortcuts/_internals/runner.py b/neuro-shortcuts/shortcuts/internals/runners.py similarity index 100% rename from neuro-shortcuts/neuro_shortcuts/_internals/runner.py rename to neuro-shortcuts/shortcuts/internals/runners.py From 582dc5984fb8bebd55f6fff445af12f5d3e598a4 Mon Sep 17 00:00:00 2001 From: Artem Yushkovskiy Date: Fri, 6 Sep 2019 18:07:45 +0300 Subject: [PATCH 08/15] towards simplicity --- neuro-shortcuts/Makefile | 2 +- neuro-shortcuts/shortcuts/api.py | 212 -------------------- neuro-shortcuts/shortcuts/config.py | 219 +++++++++++++++++++-- neuro-shortcuts/shortcuts/internals/abc.py | 136 ++++++------- 4 files changed, 274 insertions(+), 295 deletions(-) delete mode 100644 neuro-shortcuts/shortcuts/api.py diff --git a/neuro-shortcuts/Makefile b/neuro-shortcuts/Makefile index a54332c9..4ada006a 100644 --- a/neuro-shortcuts/Makefile +++ b/neuro-shortcuts/Makefile @@ -1,4 +1,4 @@ -ISORT_DIRS := neuro_shortcuts setup.py +ISORT_DIRS := neuro-shortcuts setup.py BLACK_DIRS := $(ISORT_DIRS) MYPY_DIRS := tests diff --git a/neuro-shortcuts/shortcuts/api.py b/neuro-shortcuts/shortcuts/api.py deleted file mode 100644 index a84fd6a8..00000000 --- a/neuro-shortcuts/shortcuts/api.py +++ /dev/null @@ -1,212 +0,0 @@ -from .internals.abc import Config -from .internals.runners import run - - -##### SETUP ##### - 
- -def setup(cfg: Config): - run(f"neuro kill {cfg.SETUP_NAME}") - cmd = ( - f"neuro run --name {cfg.SETUP_NAME} --preset cpu-small --detach " - f"--volume {cfg.storage.root}:{cfg.container.root}:ro " - f"{cfg.BASE_ENV_NAME} 'tail -f /dev/null'" # TODO: must be 'sleep 1h' - ) - run(cmd) - run(f"neuro cp -r {cfg.local.requirements} {cfg.storage.requirements}") - # TODO: see below - # For some reason the second command fail - # neuro exec {cfg.SETUP_NAME} 'apt-get update' - # neuro exec {cfg.SETUP_NAME} 'cat {REQUIREMENTS_PATH_ENV}/apt.txt | xargs apt-get install -y' - run( - f"neuro exec {cfg.SETUP_NAME} 'pip install -r {cfg.container.requirements}/pip.txt'" - ) - run(f"neuro job save {cfg.SETUP_NAME} {cfg.CUSTOM_ENV_NAME}") - run(f"neuro kill {cfg.SETUP_NAME}") - - -def test(cfg: Config): - run("neuro ls") - -##### STORAGE ##### - - -def upload_code(cfg: Config) -> None: - run(f"neuro cp -r -T {cfg.local.code} {cfg.container.code}") - - -# TODO: redundant? clean where? locally? -def clean_code(cfg: Config) -> None: - # run(f"neuro rm -r {CODE_PATH_STORAGE}") - raise NotImplemented() - - -def upload_data(cfg: Config) -> None: - # run(f"neuro storage load -p -u -T {DATA_PATH} {DATA_PATH_STORAGE}") - raise NotImplemented() - - -def clean_data(cfg: Config) -> None: - # run(f"neuro rm -r {DATA_PATH_STORAGE}") - raise NotImplemented() - - -def upload_notebooks(cfg: Config) -> None: - # run(f"neuro cp -r -T {NOTEBOOKS_PATH} {NOTEBOOKS_PATH_STORAGE}") - raise NotImplemented() - - -def download_notebooks(cfg: Config) -> None: - # run(f"neuro cp -r {NOTEBOOKS_PATH_STORAGE} {NOTEBOOKS_PATH}") - raise NotImplemented() - - -def clean_notebooks(cfg: Config) -> None: - # run(f"neuro rm -r {NOTEBOOKS_PATH_STORAGE}") - raise NotImplemented() - - -def upload(cfg: Config) -> None: - # upload_code() - # upload_data() - # upload_notebooks() - raise NotImplemented() - - -def clean(cfg: Config) -> None: - # clean_code() - # clean_data() - # clean_notebooks() - raise NotImplemented() - - 
-##### JOBS ##### - - -def run_training(cfg: Config) -> None: - # cmd = ( - # f"python {CODE_PATH_ENV}/train.py --log_dir " - # f"{RESULTS_PATH_ENV} --data_root {DATA_PATH_ENV}/cifar10" - # ) - # run( - # f"neuro run --name {cfg.TRAINING_NAME} --preset gpu-small " - # f"--volume {DATA_PATH_STORAGE}:{DATA_PATH_ENV}:ro " - # f"--volume {CODE_PATH_STORAGE}:{CODE_PATH_ENV}:ro " - # f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:rw " - # f"{cfg.CUSTOM_ENV_NAME} " - # f"'{cmd}'" - # ) - raise NotImplemented() - - -def kill_training(cfg: Config) -> None: - # run(f"neuro kill {cfg.TRAINING_NAME}") - raise NotImplemented() - - -def connect_training(cfg: Config) -> None: - # run(f"neuro exec {cfg.TRAINING_NAME} bash") - raise NotImplemented() - - -def run_jupyter(cfg: Config) -> None: - # cmd = ( - # f"jupyter notebook --no-browser --ip=0.0.0.0 --allow-root " - # f"--NotebookApp.token= --notebook-dir={NOTEBOOKS_PATH_ENV}" - # ) - # run( - # f"neuro run " - # f"--name {cfg.JUPYTER_NAME} " - # f"--preset gpu-small " - # f"--http 8888 --no-http-auth --detach " - # f"--volume {DATA_PATH_STORAGE}:{DATA_PATH_ENV}:ro " - # f"--volume {CODE_PATH_STORAGE}:{CODE_PATH_ENV}:rw " - # f"--volume {NOTEBOOKS_PATH_STORAGE}:{NOTEBOOKS_PATH_ENV}:rw " - # f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:rw " - # f"{cfg.CUSTOM_ENV_NAME} " - # f"'{cmd}'" - # ) - # run(f"neuro job browse {cfg.JUPYTER_NAME}") - raise NotImplemented() - - -def kill_jupyter(cfg: Config) -> None: - # run(f"neuro kill {cfg.JUPYTER_NAME}") - raise NotImplemented() - - -def run_tensorboard(cfg: Config) -> None: - # cmd = f"tensorboard --logdir={RESULTS_PATH_ENV}" - # run( - # f"neuro run " - # f"--name {cfg.TENSORBOARD_NAME} " - # f"--preset cpu-small " - # f"--http 6006 --no-http-auth --detach " - # f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:ro " - # f"{cfg.CUSTOM_ENV_NAME} " - # f"'{cmd}'" - # ) - # run(f"neuro job browse {cfg.TENSORBOARD_NAME}") - raise NotImplemented() - - -def 
kill_tensorboard(cfg: Config) -> None: - # run(f"neuro kill {cfg.TENSORBOARD_NAME}") - raise NotImplemented() - - -def run_filebrowser(cfg: Config) -> None: - # run( - # f"neuro run " - # f"--name {cfg.FILEBROWSER_NAME} " - # f"--preset cpu-small " - # f"--http 80 --no-http-auth --detach " - # f"--volume {PROJECT_PATH_STORAGE}:/srv:rw " - # f"filebrowser/filebrowser" - # ) - # run(f"neuro job browse {cfg.FILEBROWSER_NAME}") - raise NotImplemented() - - -def kill_filebrowser(cfg: Config) -> None: - # run(f"neuro kill {cfg.FILEBROWSER_NAME}") - raise NotImplemented() - - -def kill(cfg: Config) -> None: - # kill_training() - # kill_jupyter() - # kill_tensorboard() - # kill_filebrowser() - raise NotImplemented() - - -##### LOCAL ##### - - -def setup_local(cfg: Config) -> None: - # run("pip install -r requirements/pip.txt") - raise NotImplemented() - - -def lint(cfg: Config) -> None: - # run("flake8 .") - # run("mypy .") - raise NotImplemented() - - -def install(cfg: Config) -> None: - # run("python setup.py install --user") - raise NotImplemented() - - -##### MISC ##### - - -def ps(cfg: Config) -> None: - # run(f"neuro ps") - raise NotImplemented() - - -if __name__ == "__main__": - setup() diff --git a/neuro-shortcuts/shortcuts/config.py b/neuro-shortcuts/shortcuts/config.py index 7766d62c..a4902cc4 100644 --- a/neuro-shortcuts/shortcuts/config.py +++ b/neuro-shortcuts/shortcuts/config.py @@ -1,24 +1,215 @@ -from .internals.abc import Config, StorageProject, LocalProject, ContainerProject +from .internals.runners import run + +PROJECT_NAME = "{{cookiecutter.project_slug}}" + +CODE_PATH = "code" +DATA_PATH = "data" +NOTEBOOKS_PATH = "notebooks" +REQUIREMENTS_PATH = "requirements" +RESULTS_PATH = "results" +PROJECT_PATH_STORAGE = f"storage:{PROJECT_NAME}" +CODE_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{CODE_PATH}" +DATA_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{DATA_PATH}" +NOTEBOOKS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{NOTEBOOKS_PATH}" +REQUIREMENTS_PATH_STORAGE = 
f"{PROJECT_PATH_STORAGE}/{REQUIREMENTS_PATH}" +RESULTS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{RESULTS_PATH}" + +PROJECT_PATH_ENV = "/project" +CODE_PATH_ENV = f"{PROJECT_PATH_ENV}/{CODE_PATH}" +DATA_PATH_ENV = f"{PROJECT_PATH_ENV}/{DATA_PATH}" +NOTEBOOKS_PATH_ENV = f"{PROJECT_PATH_ENV}/{NOTEBOOKS_PATH}" +REQUIREMENTS_PATH_ENV = f"{PROJECT_PATH_ENV}/{REQUIREMENTS_PATH}" +RESULTS_PATH_ENV = f"{PROJECT_PATH_ENV}/{RESULTS_PATH}" SETUP_NAME = "setup" TRAINING_NAME = "training" JUPYTER_NAME = "jupyter" TENSORBOARD_NAME = "tensorboard" FILEBROWSER_NAME = "filebrowser" + BASE_ENV_NAME = "image:neuro/base" CUSTOM_ENV_NAME = "image:neuro/custom" -def create_config(project_name: str) -> Config: - return Config( - local=LocalProject(), - storage=StorageProject(project_name), - container=ContainerProject(), - SETUP_NAME=SETUP_NAME, - TRAINING_NAME=TRAINING_NAME, - JUPYTER_NAME=JUPYTER_NAME, - TENSORBOARD_NAME=TENSORBOARD_NAME, - FILEBROWSER_NAME=FILEBROWSER_NAME, - BASE_ENV_NAME=BASE_ENV_NAME, - CUSTOM_ENV_NAME=CUSTOM_ENV_NAME, - ) \ No newline at end of file +##### SETUP ##### + + +def setup(): + run(f"neuro kill {SETUP_NAME}") + cmd = ( + f"neuro run --name {SETUP_NAME} --preset cpu-small --detach " + f"--volume {PROJECT_PATH_STORAGE}:{PROJECT_PATH_ENV}:ro " + f"{BASE_ENV_NAME} 'tail -f /dev/null'" # TODO: must be 'sleep 1h' + ) + run(cmd) + run(f"neuro cp -r {REQUIREMENTS_PATH} {REQUIREMENTS_PATH_STORAGE}") + # TODO: see below + # For some reason the second command fail + # neuro exec {SETUP_NAME} 'apt-get update' + # neuro exec {SETUP_NAME} 'cat {REQUIREMENTS_PATH_ENV}/apt.txt | xargs apt-get install -y' + run( + f"neuro exec {SETUP_NAME} 'pip install -r {REQUIREMENTS_PATH_ENV}/pip.txt'" + ) + run(f"neuro job save {SETUP_NAME} {CUSTOM_ENV_NAME}") + run(f"neuro kill {SETUP_NAME}") + + +def test(): + run("neuro ls") + +##### STORAGE ##### + + +def upload_code() -> None: + run(f"neuro cp -r -T {CODE_PATH} {CODE_PATH_ENV}") + + +# TODO: redundant? clean where? locally? 
+def clean_code() -> None: + run(f"neuro rm -r {CODE_PATH_STORAGE}") + + +def upload_data() -> None: + run(f"neuro storage load -p -u -T {DATA_PATH} {DATA_PATH_STORAGE}") + + +def clean_data() -> None: + run(f"neuro rm -r {DATA_PATH_STORAGE}") + + +def upload_notebooks() -> None: + run(f"neuro cp -r -T {NOTEBOOKS_PATH} {NOTEBOOKS_PATH_STORAGE}") + + +def download_notebooks() -> None: + run(f"neuro cp -r {NOTEBOOKS_PATH_STORAGE} {NOTEBOOKS_PATH}") + + +def clean_notebooks() -> None: + run(f"neuro rm -r {NOTEBOOKS_PATH_STORAGE}") + + +def upload() -> None: + upload_code() + upload_data() + upload_notebooks() + + +def clean() -> None: + clean_code() + clean_data() + clean_notebooks() + + +##### JOBS ##### + + +def run_training() -> None: + cmd = ( + f"python {CODE_PATH_ENV}/train.py --log_dir " + f"{RESULTS_PATH_ENV} --data_root {DATA_PATH_ENV}/cifar10" + ) + run( + f"neuro run --name {TRAINING_NAME} --preset gpu-small " + f"--volume {DATA_PATH_STORAGE}:{DATA_PATH_ENV}:ro " + f"--volume {CODE_PATH_STORAGE}:{CODE_PATH_ENV}:ro " + f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:rw " + f"{CUSTOM_ENV_NAME} " + f"'{cmd}'" + ) + + +def kill_training() -> None: + run(f"neuro kill {TRAINING_NAME}") + + +def connect_training() -> None: + run(f"neuro exec {TRAINING_NAME} bash") + + +def run_jupyter() -> None: + cmd = ( + f"jupyter notebook --no-browser --ip=0.0.0.0 --allow-root " + f"--NotebookApp.token= --notebook-dir={NOTEBOOKS_PATH_ENV}" + ) + run( + f"neuro run " + f"--name {JUPYTER_NAME} " + f"--preset gpu-small " + f"--http 8888 --no-http-auth --detach " + f"--volume {DATA_PATH_STORAGE}:{DATA_PATH_ENV}:ro " + f"--volume {CODE_PATH_STORAGE}:{CODE_PATH_ENV}:rw " + f"--volume {NOTEBOOKS_PATH_STORAGE}:{NOTEBOOKS_PATH_ENV}:rw " + f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:rw " + f"{CUSTOM_ENV_NAME} " + f"'{cmd}'" + ) + run(f"neuro job browse {JUPYTER_NAME}") + + +def kill_jupyter() -> None: + run(f"neuro kill {JUPYTER_NAME}") + + +def run_tensorboard() -> None: 
+ cmd = f"tensorboard --logdir={RESULTS_PATH_ENV}" + run( + f"neuro run " + f"--name {TENSORBOARD_NAME} " + f"--preset cpu-small " + f"--http 6006 --no-http-auth --detach " + f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:ro " + f"{CUSTOM_ENV_NAME} " + f"'{cmd}'" + ) + run(f"neuro job browse {TENSORBOARD_NAME}") + + +def kill_tensorboard() -> None: + run(f"neuro kill {TENSORBOARD_NAME}") + + +def run_filebrowser() -> None: + run( + f"neuro run " + f"--name {FILEBROWSER_NAME} " + f"--preset cpu-small " + f"--http 80 --no-http-auth --detach " + f"--volume {PROJECT_PATH_STORAGE}:/srv:rw " + f"filebrowser/filebrowser" + ) + run(f"neuro job browse {FILEBROWSER_NAME}") + + +def kill_filebrowser() -> None: + run(f"neuro kill {FILEBROWSER_NAME}") + + +def kill() -> None: + kill_training() + kill_jupyter() + kill_tensorboard() + kill_filebrowser() + + +##### LOCAL ##### + + +def setup_local() -> None: + run("pip install -r requirements/pip.txt") + + +def lint() -> None: + run("flake8 .") + run("mypy .") + + +def install() -> None: + run("python setup.py install --user") + + +##### MISC ##### + + +def ps() -> None: + run(f"neuro ps") diff --git a/neuro-shortcuts/shortcuts/internals/abc.py b/neuro-shortcuts/shortcuts/internals/abc.py index d5e962c2..7459cdb4 100644 --- a/neuro-shortcuts/shortcuts/internals/abc.py +++ b/neuro-shortcuts/shortcuts/internals/abc.py @@ -1,68 +1,68 @@ -import abc -from dataclasses import dataclass - - -class Project(abc.ABC): - @abc.abstractmethod - def root(self) -> str: - pass - - @property - def data(self) -> str: - return f"{self.root}/data" - - @property - def code(self) -> str: - return f"{self.root}/code" - - @property - def notebooks(self) -> str: - return f"{self.root}/notebooks" - - @property - def requirements(self) -> str: - return f"{self.root}/requirements" - - @property - def results(self) -> str: - return f"{self.root}/results" - - -class StorageProject(Project): - def __init__(self, project_name: str) -> None: - 
self._project_name = project_name - - def root(self) -> str: - return f"storage:{self._project_name}" - - -class LocalProject(Project): - def __init__(self): - # TODO (artem) remember `pwd` as `self._project_path` - pass - - def root(self) -> str: - # TODO: return self._project_path - raise NotImplemented() - - -class ContainerProject(Project): - def root(self) -> str: - # TODO: always in the root? - return "/project" - - -@dataclass -class Config: - local: LocalProject - storage: StorageProject - container: ContainerProject - - # TODO: cleanup - SETUP_NAME: str - TRAINING_NAME: str - JUPYTER_NAME: str - TENSORBOARD_NAME: str - FILEBROWSER_NAME: str - BASE_ENV_NAME: str - CUSTOM_ENV_NAME: str +# import abc +# from dataclasses import dataclass +# +# +# class Project(abc.ABC): +# @abc.abstractmethod +# def root(self) -> str: +# pass +# +# @property +# def data(self) -> str: +# return f"{self.root}/data" +# +# @property +# def code(self) -> str: +# return f"{self.root}/code" +# +# @property +# def notebooks(self) -> str: +# return f"{self.root}/notebooks" +# +# @property +# def requirements(self) -> str: +# return f"{self.root}/requirements" +# +# @property +# def results(self) -> str: +# return f"{self.root}/results" +# +# +# class StorageProject(Project): +# def __init__(self, project_name: str) -> None: +# self._project_name = project_name +# +# def root(self) -> str: +# return f"storage:{self._project_name}" +# +# +# class LocalProject(Project): +# def __init__(self): +# # TODO (artem) remember `pwd` as `self._project_path` +# pass +# +# def root(self) -> str: +# # TODO: return self._project_path +# raise NotImplemented() +# +# +# class ContainerProject(Project): +# def root(self) -> str: +# # TODO: always in the root? 
+# return "/project" +# +# +# @dataclass +# class Config: +# local: LocalProject +# storage: StorageProject +# container: ContainerProject +# +# # TODO: cleanup +# SETUP_NAME: str +# TRAINING_NAME: str +# JUPYTER_NAME: str +# TENSORBOARD_NAME: str +# FILEBROWSER_NAME: str +# BASE_ENV_NAME: str +# CUSTOM_ENV_NAME: str From ab380e2a37347a767c44b93103a83875a1288f1f Mon Sep 17 00:00:00 2001 From: Artem Yushkovskiy Date: Fri, 6 Sep 2019 18:25:41 +0300 Subject: [PATCH 09/15] final --- neuro-shortcuts/Makefile | 2 +- neuro-shortcuts/neu.py | 304 ++++++++++++++++-- neuro-shortcuts/setup.cfg | 9 - neuro-shortcuts/shortcuts/__init__.py | 0 neuro-shortcuts/shortcuts/config.py | 215 ------------- .../shortcuts/internals/__init__.py | 0 neuro-shortcuts/shortcuts/internals/abc.py | 68 ---- .../shortcuts/internals/loaders.py | 20 -- .../shortcuts/internals/runners.py | 112 ------- 9 files changed, 282 insertions(+), 448 deletions(-) delete mode 100644 neuro-shortcuts/shortcuts/__init__.py delete mode 100644 neuro-shortcuts/shortcuts/config.py delete mode 100644 neuro-shortcuts/shortcuts/internals/__init__.py delete mode 100644 neuro-shortcuts/shortcuts/internals/abc.py delete mode 100644 neuro-shortcuts/shortcuts/internals/loaders.py delete mode 100644 neuro-shortcuts/shortcuts/internals/runners.py diff --git a/neuro-shortcuts/Makefile b/neuro-shortcuts/Makefile index 4ada006a..e937b85f 100644 --- a/neuro-shortcuts/Makefile +++ b/neuro-shortcuts/Makefile @@ -1,4 +1,4 @@ -ISORT_DIRS := neuro-shortcuts setup.py +ISORT_DIRS := neu.py setup.py BLACK_DIRS := $(ISORT_DIRS) MYPY_DIRS := tests diff --git a/neuro-shortcuts/neu.py b/neuro-shortcuts/neu.py index 2ba2598f..06ea730b 100644 --- a/neuro-shortcuts/neu.py +++ b/neuro-shortcuts/neu.py @@ -1,42 +1,300 @@ -import pathlib +PROJECT_NAME = "{{cookiecutter.project_slug}}" -from shortcuts.config import create_config -from shortcuts.internals.loaders import collect_entrypoints -from argparse import ArgumentParser -API_MODULE_NAME = 
"shortcuts.api" +CODE_PATH = "code" +DATA_PATH = "data" +NOTEBOOKS_PATH = "notebooks" +REQUIREMENTS_PATH = "requirements" +RESULTS_PATH = "results" +PROJECT_PATH_STORAGE = f"storage:{PROJECT_NAME}" +CODE_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{CODE_PATH}" +DATA_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{DATA_PATH}" +NOTEBOOKS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{NOTEBOOKS_PATH}" +REQUIREMENTS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{REQUIREMENTS_PATH}" +RESULTS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{RESULTS_PATH}" +PROJECT_PATH_ENV = "/project" +CODE_PATH_ENV = f"{PROJECT_PATH_ENV}/{CODE_PATH}" +DATA_PATH_ENV = f"{PROJECT_PATH_ENV}/{DATA_PATH}" +NOTEBOOKS_PATH_ENV = f"{PROJECT_PATH_ENV}/{NOTEBOOKS_PATH}" +REQUIREMENTS_PATH_ENV = f"{PROJECT_PATH_ENV}/{REQUIREMENTS_PATH}" +RESULTS_PATH_ENV = f"{PROJECT_PATH_ENV}/{RESULTS_PATH}" -def create_parser() -> ArgumentParser: - parser = ArgumentParser(description='TODO root') - parser.add_argument('action', metavar='ACTION', type=str, - help='TODO action') - return parser +SETUP_NAME = "setup" +TRAINING_NAME = "training" +JUPYTER_NAME = "jupyter" +TENSORBOARD_NAME = "tensorboard" +FILEBROWSER_NAME = "filebrowser" -def get_project_name() -> str: - """ We assume that the parent directory is named same as the project - """ - current_dir = pathlib.Path(__file__).resolve() - return str(current_dir.parent) +BASE_ENV_NAME = "image:neuro/base" +CUSTOM_ENV_NAME = "image:neuro/custom" -if __name__ == "__main__": +# ##### SETUP ##### + + +def setup(): + run(f"neuro kill {SETUP_NAME}", check_return_code=False) + cmd = ( + f"neuro run --name {SETUP_NAME} --preset cpu-small --detach " + f"--volume {PROJECT_PATH_STORAGE}:{PROJECT_PATH_ENV}:ro " + f"{BASE_ENV_NAME} 'tail -f /dev/null'" # TODO: must be 'sleep 1h' + ) + run(cmd) + run(f"neuro cp -r {REQUIREMENTS_PATH} {REQUIREMENTS_PATH_STORAGE}") + # TODO: fix commands below + # For some reason the second command fail + # neuro exec {SETUP_NAME} 'apt-get update' + # neuro exec 
{SETUP_NAME} 'cat {REQUIREMENTS_PATH_ENV}/apt.txt | xargs apt-get install -y' # noqa + run(f"neuro exec {SETUP_NAME} 'pip install -r {REQUIREMENTS_PATH_ENV}/pip.txt'") + run(f"neuro job save {SETUP_NAME} {CUSTOM_ENV_NAME}") + run(f"neuro kill {SETUP_NAME}", check_return_code=False) + + +def test(): + run("neuro ls") + + +# ##### STORAGE ##### + + +def upload_code() -> None: + run(f"neuro cp -r -T {CODE_PATH} {CODE_PATH_ENV}") + + +# TODO: redundant? clean where? locally? +def clean_code() -> None: + run(f"neuro rm -r {CODE_PATH_STORAGE}") + + +def upload_data() -> None: + run(f"neuro storage load -p -u -T {DATA_PATH} {DATA_PATH_STORAGE}") + + +def clean_data() -> None: + run(f"neuro rm -r {DATA_PATH_STORAGE}") + + +def upload_notebooks() -> None: + run(f"neuro cp -r -T {NOTEBOOKS_PATH} {NOTEBOOKS_PATH_STORAGE}") + + +def download_notebooks() -> None: + run(f"neuro cp -r {NOTEBOOKS_PATH_STORAGE} {NOTEBOOKS_PATH}") + + +def clean_notebooks() -> None: + run(f"neuro rm -r {NOTEBOOKS_PATH_STORAGE}") + + +def upload() -> None: + upload_code() + upload_data() + upload_notebooks() + + +def clean() -> None: + clean_code() + clean_data() + clean_notebooks() + + +# ##### JOBS ##### + + +def run_training() -> None: + cmd = ( + f"python {CODE_PATH_ENV}/train.py --log_dir " + f"{RESULTS_PATH_ENV} --data_root {DATA_PATH_ENV}/cifar10" + ) + run( + f"neuro run --name {TRAINING_NAME} --preset gpu-small " + f"--volume {DATA_PATH_STORAGE}:{DATA_PATH_ENV}:ro " + f"--volume {CODE_PATH_STORAGE}:{CODE_PATH_ENV}:ro " + f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:rw " + f"{CUSTOM_ENV_NAME} " + f"'{cmd}'" + ) + + +def kill_training() -> None: + run(f"neuro kill {TRAINING_NAME}") + + +def connect_training() -> None: + run(f"neuro exec {TRAINING_NAME} bash") + + +def run_jupyter() -> None: + cmd = ( + f"jupyter notebook --no-browser --ip=0.0.0.0 --allow-root " + f"--NotebookApp.token= --notebook-dir={NOTEBOOKS_PATH_ENV}" + ) + run( + f"neuro run " + f"--name {JUPYTER_NAME} " + 
f"--preset gpu-small " + f"--http 8888 --no-http-auth --detach " + f"--volume {DATA_PATH_STORAGE}:{DATA_PATH_ENV}:ro " + f"--volume {CODE_PATH_STORAGE}:{CODE_PATH_ENV}:rw " + f"--volume {NOTEBOOKS_PATH_STORAGE}:{NOTEBOOKS_PATH_ENV}:rw " + f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:rw " + f"{CUSTOM_ENV_NAME} " + f"'{cmd}'" + ) + run(f"neuro job browse {JUPYTER_NAME}") + + +def kill_jupyter() -> None: + run(f"neuro kill {JUPYTER_NAME}") + + +def run_tensorboard() -> None: + cmd = f"tensorboard --logdir={RESULTS_PATH_ENV}" + run( + f"neuro run " + f"--name {TENSORBOARD_NAME} " + f"--preset cpu-small " + f"--http 6006 --no-http-auth --detach " + f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:ro " + f"{CUSTOM_ENV_NAME} " + f"'{cmd}'" + ) + run(f"neuro job browse {TENSORBOARD_NAME}") + + +def kill_tensorboard() -> None: + run(f"neuro kill {TENSORBOARD_NAME}") + + +def run_filebrowser() -> None: + run( + f"neuro run " + f"--name {FILEBROWSER_NAME} " + f"--preset cpu-small " + f"--http 80 --no-http-auth --detach " + f"--volume {PROJECT_PATH_STORAGE}:/srv:rw " + f"filebrowser/filebrowser" + ) + run(f"neuro job browse {FILEBROWSER_NAME}") + + +def kill_filebrowser() -> None: + run(f"neuro kill {FILEBROWSER_NAME}") + + +def kill() -> None: + kill_training() + kill_jupyter() + kill_tensorboard() + kill_filebrowser() + + +# ##### LOCAL ##### + + +def setup_local() -> None: + run("pip install -r requirements/pip.txt") + + +def lint() -> None: + run("flake8 .") + run("mypy .") + + +def install() -> None: + run("python setup.py install --user") + + +# ##### MISC ##### + + +def ps() -> None: + run(f"neuro ps") + + +# ############################# INTERNALS BELOW ############################## + + +def run(cmd: str, check_return_code: bool = True) -> None: + import shlex + import subprocess + import sys + + DEFAULT_TIMEOUT = 5 * 60 + + print(f"Runing command: '{cmd}'") + args = shlex.split(cmd) + proc = subprocess.run( + args, + timeout=DEFAULT_TIMEOUT, + 
encoding="utf8", + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + print(proc.stdout) + if check_return_code: + try: + proc.check_returncode() + except subprocess.CalledProcessError: + sys.stderr.write(f"ERROR: {proc.stderr}") + exit(proc.returncode) + + +def __main() -> None: + from argparse import ArgumentParser + import importlib + import sys + import typing as t + + def collect_entrypoints(module_name: str) -> t.Dict[str, t.Callable[[], None]]: + """ collect all callable object from module `module_name` + whose names start with a lowerase english alphabet character + """ + importlib.import_module(module_name) + module = sys.modules[module_name] + result: t.Dict[str, t.Callable] = {} + for name in dir(module): + if "a" <= name[0] <= "z": + obj = getattr(module, name) + if callable(obj): + result[name] = obj + return result + + def create_parser() -> ArgumentParser: + parser = ArgumentParser(description="Neuromation shortcuts script") + parser.add_argument( + "action", + metavar="ACTION", + type=str, + help="Shortcut action (`help` for help)", + ) + return parser + parser = create_parser() args = parser.parse_args() action_name = args.action - module_name = API_MODULE_NAME + module_name = __name__ entrypoints = collect_entrypoints(module_name) action = entrypoints.get(action_name) + + help_message = f"Available actions: {', '.join(entrypoints.keys())}" + if action_name == "help": + # TODO: add help messages + print(help_message) + return + if not action: print(f"ERROR: Cannot find action '{action_name}' in module '{module_name}'") - print(f"Available actions: {', '.join(entrypoints.keys())}") + print() exit(1) - project_name = get_project_name() - cfg = create_config(project_name) - try: - action(cfg) + action() except (TypeError, AttributeError) as e: - print(f"ERROR: Could not execute action '{action_name}': {e}") + sys.stderr.write(f"ERROR: Could not execute action '{action_name}': {e}") + exit(2) + +if __name__ == "__main__": + __main() diff --git 
a/neuro-shortcuts/setup.cfg b/neuro-shortcuts/setup.cfg index 1bef14f7..49c28cb5 100644 --- a/neuro-shortcuts/setup.cfg +++ b/neuro-shortcuts/setup.cfg @@ -23,12 +23,3 @@ warn_redundant_casts = True warn_unused_ignores = True warn_unused_configs = True incremental = False - -[mypy-pytest] -ignore_missing_imports = true - -[mypy-timeout_decorator] -ignore_missing_imports = true - -[mypy-pexpect] -ignore_missing_imports = true diff --git a/neuro-shortcuts/shortcuts/__init__.py b/neuro-shortcuts/shortcuts/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/neuro-shortcuts/shortcuts/config.py b/neuro-shortcuts/shortcuts/config.py deleted file mode 100644 index a4902cc4..00000000 --- a/neuro-shortcuts/shortcuts/config.py +++ /dev/null @@ -1,215 +0,0 @@ -from .internals.runners import run - -PROJECT_NAME = "{{cookiecutter.project_slug}}" - -CODE_PATH = "code" -DATA_PATH = "data" -NOTEBOOKS_PATH = "notebooks" -REQUIREMENTS_PATH = "requirements" -RESULTS_PATH = "results" -PROJECT_PATH_STORAGE = f"storage:{PROJECT_NAME}" -CODE_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{CODE_PATH}" -DATA_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{DATA_PATH}" -NOTEBOOKS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{NOTEBOOKS_PATH}" -REQUIREMENTS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{REQUIREMENTS_PATH}" -RESULTS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{RESULTS_PATH}" - -PROJECT_PATH_ENV = "/project" -CODE_PATH_ENV = f"{PROJECT_PATH_ENV}/{CODE_PATH}" -DATA_PATH_ENV = f"{PROJECT_PATH_ENV}/{DATA_PATH}" -NOTEBOOKS_PATH_ENV = f"{PROJECT_PATH_ENV}/{NOTEBOOKS_PATH}" -REQUIREMENTS_PATH_ENV = f"{PROJECT_PATH_ENV}/{REQUIREMENTS_PATH}" -RESULTS_PATH_ENV = f"{PROJECT_PATH_ENV}/{RESULTS_PATH}" - -SETUP_NAME = "setup" -TRAINING_NAME = "training" -JUPYTER_NAME = "jupyter" -TENSORBOARD_NAME = "tensorboard" -FILEBROWSER_NAME = "filebrowser" - -BASE_ENV_NAME = "image:neuro/base" -CUSTOM_ENV_NAME = "image:neuro/custom" - - -##### SETUP ##### - - -def setup(): - run(f"neuro kill {SETUP_NAME}") - 
cmd = ( - f"neuro run --name {SETUP_NAME} --preset cpu-small --detach " - f"--volume {PROJECT_PATH_STORAGE}:{PROJECT_PATH_ENV}:ro " - f"{BASE_ENV_NAME} 'tail -f /dev/null'" # TODO: must be 'sleep 1h' - ) - run(cmd) - run(f"neuro cp -r {REQUIREMENTS_PATH} {REQUIREMENTS_PATH_STORAGE}") - # TODO: see below - # For some reason the second command fail - # neuro exec {SETUP_NAME} 'apt-get update' - # neuro exec {SETUP_NAME} 'cat {REQUIREMENTS_PATH_ENV}/apt.txt | xargs apt-get install -y' - run( - f"neuro exec {SETUP_NAME} 'pip install -r {REQUIREMENTS_PATH_ENV}/pip.txt'" - ) - run(f"neuro job save {SETUP_NAME} {CUSTOM_ENV_NAME}") - run(f"neuro kill {SETUP_NAME}") - - -def test(): - run("neuro ls") - -##### STORAGE ##### - - -def upload_code() -> None: - run(f"neuro cp -r -T {CODE_PATH} {CODE_PATH_ENV}") - - -# TODO: redundant? clean where? locally? -def clean_code() -> None: - run(f"neuro rm -r {CODE_PATH_STORAGE}") - - -def upload_data() -> None: - run(f"neuro storage load -p -u -T {DATA_PATH} {DATA_PATH_STORAGE}") - - -def clean_data() -> None: - run(f"neuro rm -r {DATA_PATH_STORAGE}") - - -def upload_notebooks() -> None: - run(f"neuro cp -r -T {NOTEBOOKS_PATH} {NOTEBOOKS_PATH_STORAGE}") - - -def download_notebooks() -> None: - run(f"neuro cp -r {NOTEBOOKS_PATH_STORAGE} {NOTEBOOKS_PATH}") - - -def clean_notebooks() -> None: - run(f"neuro rm -r {NOTEBOOKS_PATH_STORAGE}") - - -def upload() -> None: - upload_code() - upload_data() - upload_notebooks() - - -def clean() -> None: - clean_code() - clean_data() - clean_notebooks() - - -##### JOBS ##### - - -def run_training() -> None: - cmd = ( - f"python {CODE_PATH_ENV}/train.py --log_dir " - f"{RESULTS_PATH_ENV} --data_root {DATA_PATH_ENV}/cifar10" - ) - run( - f"neuro run --name {TRAINING_NAME} --preset gpu-small " - f"--volume {DATA_PATH_STORAGE}:{DATA_PATH_ENV}:ro " - f"--volume {CODE_PATH_STORAGE}:{CODE_PATH_ENV}:ro " - f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:rw " - f"{CUSTOM_ENV_NAME} " - f"'{cmd}'" - ) - - 
-def kill_training() -> None: - run(f"neuro kill {TRAINING_NAME}") - - -def connect_training() -> None: - run(f"neuro exec {TRAINING_NAME} bash") - - -def run_jupyter() -> None: - cmd = ( - f"jupyter notebook --no-browser --ip=0.0.0.0 --allow-root " - f"--NotebookApp.token= --notebook-dir={NOTEBOOKS_PATH_ENV}" - ) - run( - f"neuro run " - f"--name {JUPYTER_NAME} " - f"--preset gpu-small " - f"--http 8888 --no-http-auth --detach " - f"--volume {DATA_PATH_STORAGE}:{DATA_PATH_ENV}:ro " - f"--volume {CODE_PATH_STORAGE}:{CODE_PATH_ENV}:rw " - f"--volume {NOTEBOOKS_PATH_STORAGE}:{NOTEBOOKS_PATH_ENV}:rw " - f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:rw " - f"{CUSTOM_ENV_NAME} " - f"'{cmd}'" - ) - run(f"neuro job browse {JUPYTER_NAME}") - - -def kill_jupyter() -> None: - run(f"neuro kill {JUPYTER_NAME}") - - -def run_tensorboard() -> None: - cmd = f"tensorboard --logdir={RESULTS_PATH_ENV}" - run( - f"neuro run " - f"--name {TENSORBOARD_NAME} " - f"--preset cpu-small " - f"--http 6006 --no-http-auth --detach " - f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:ro " - f"{CUSTOM_ENV_NAME} " - f"'{cmd}'" - ) - run(f"neuro job browse {TENSORBOARD_NAME}") - - -def kill_tensorboard() -> None: - run(f"neuro kill {TENSORBOARD_NAME}") - - -def run_filebrowser() -> None: - run( - f"neuro run " - f"--name {FILEBROWSER_NAME} " - f"--preset cpu-small " - f"--http 80 --no-http-auth --detach " - f"--volume {PROJECT_PATH_STORAGE}:/srv:rw " - f"filebrowser/filebrowser" - ) - run(f"neuro job browse {FILEBROWSER_NAME}") - - -def kill_filebrowser() -> None: - run(f"neuro kill {FILEBROWSER_NAME}") - - -def kill() -> None: - kill_training() - kill_jupyter() - kill_tensorboard() - kill_filebrowser() - - -##### LOCAL ##### - - -def setup_local() -> None: - run("pip install -r requirements/pip.txt") - - -def lint() -> None: - run("flake8 .") - run("mypy .") - - -def install() -> None: - run("python setup.py install --user") - - -##### MISC ##### - - -def ps() -> None: - run(f"neuro 
ps") diff --git a/neuro-shortcuts/shortcuts/internals/__init__.py b/neuro-shortcuts/shortcuts/internals/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/neuro-shortcuts/shortcuts/internals/abc.py b/neuro-shortcuts/shortcuts/internals/abc.py deleted file mode 100644 index 7459cdb4..00000000 --- a/neuro-shortcuts/shortcuts/internals/abc.py +++ /dev/null @@ -1,68 +0,0 @@ -# import abc -# from dataclasses import dataclass -# -# -# class Project(abc.ABC): -# @abc.abstractmethod -# def root(self) -> str: -# pass -# -# @property -# def data(self) -> str: -# return f"{self.root}/data" -# -# @property -# def code(self) -> str: -# return f"{self.root}/code" -# -# @property -# def notebooks(self) -> str: -# return f"{self.root}/notebooks" -# -# @property -# def requirements(self) -> str: -# return f"{self.root}/requirements" -# -# @property -# def results(self) -> str: -# return f"{self.root}/results" -# -# -# class StorageProject(Project): -# def __init__(self, project_name: str) -> None: -# self._project_name = project_name -# -# def root(self) -> str: -# return f"storage:{self._project_name}" -# -# -# class LocalProject(Project): -# def __init__(self): -# # TODO (artem) remember `pwd` as `self._project_path` -# pass -# -# def root(self) -> str: -# # TODO: return self._project_path -# raise NotImplemented() -# -# -# class ContainerProject(Project): -# def root(self) -> str: -# # TODO: always in the root? 
-# return "/project" -# -# -# @dataclass -# class Config: -# local: LocalProject -# storage: StorageProject -# container: ContainerProject -# -# # TODO: cleanup -# SETUP_NAME: str -# TRAINING_NAME: str -# JUPYTER_NAME: str -# TENSORBOARD_NAME: str -# FILEBROWSER_NAME: str -# BASE_ENV_NAME: str -# CUSTOM_ENV_NAME: str diff --git a/neuro-shortcuts/shortcuts/internals/loaders.py b/neuro-shortcuts/shortcuts/internals/loaders.py deleted file mode 100644 index 5e4e8a69..00000000 --- a/neuro-shortcuts/shortcuts/internals/loaders.py +++ /dev/null @@ -1,20 +0,0 @@ -import importlib -import sys -import typing as t - -from shortcuts.internals.abc import Config - - -def collect_entrypoints(module_name: str) -> t.Dict[str, t.Callable[[Config], None]]: - """ collect all callable object from module `module_name` - whose names start with a lowerase english alphabet character - """ - importlib.import_module(module_name) - module = sys.modules[module_name] - result: t.Dict[str, t.Callable] = {} - for name in dir(module): - if 'a' <= name[0] <= 'z': - obj = getattr(module, name) - if callable(obj): - result[name] = obj - return result diff --git a/neuro-shortcuts/shortcuts/internals/runners.py b/neuro-shortcuts/shortcuts/internals/runners.py deleted file mode 100644 index d962502e..00000000 --- a/neuro-shortcuts/shortcuts/internals/runners.py +++ /dev/null @@ -1,112 +0,0 @@ -import logging -import re -import shlex -import signal -import subprocess -import typing as t -from collections import namedtuple -from contextlib import contextmanager -from pathlib import Path -from time import sleep - - -OUT_DIRECTORY_NAME = "out" -SUBMITTED_JOBS_FILE_NAME = "submitted_jobs.txt" - - -def get_submitted_jobs_file() -> Path: - project_root = Path(__file__).resolve().parent - out_path = project_root / OUT_DIRECTORY_NAME - return out_path / SUBMITTED_JOBS_FILE_NAME - - -SUBMITTED_JOBS_FILE = get_submitted_jobs_file() - - -DEFAULT_TIMEOUT = 5 * 60 - -SysCap = namedtuple("SysCap", "out err") - -log = 
logging.getLogger(__name__) - -job_id_pattern = re.compile( - # pattern for UUID v4 taken here: https://stackoverflow.com/a/38191078 - r"(job-[0-9a-f]{8}-[0-9a-f]{4}-[4][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12})", - re.IGNORECASE, -) - - -@contextmanager -def timeout(time_s: int) -> t.Iterator[None]: - """ source: https://www.jujens.eu/posts/en/2018/Jun/02/python-timeout-function/ - """ - - def raise_timeout() -> t.NoReturn: - raise TimeoutError - - # Register a function to raise a TimeoutError on the signal. - signal.signal(signal.SIGALRM, raise_timeout) # type: ignore - # Schedule the signal to be sent after ``time``. - signal.alarm(time_s) - - try: - yield - except TimeoutError: - pass - finally: - # Unregister the signal so it won't be triggered - # if the timeout is not reached. - signal.signal(signal.SIGALRM, signal.SIG_IGN) - - -def fire_and_forget(cmd: str) -> subprocess.Popen: - proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE) - return proc - - -def wait_for_output( - cmd: str, expect_stdin: str, timeout_s: int = DEFAULT_TIMEOUT -) -> None: - delay_s = 1 - with timeout(timeout_s): - while True: - try: - captured = run(cmd, timeout_s=timeout_s // 5) - if captured.err: - print(f"stderr: `{captured.err}`") - except subprocess.CalledProcessError as e: - log.error(f"Caught error: {e}, retrying") - continue - if expect_stdin in captured.out: - return - sleep(delay_s) - - -def run(cmd: str, timeout_s: int = DEFAULT_TIMEOUT) -> SysCap: - log.info(f"Runing command: '{cmd}'") - print(f"Runing command: '{cmd}'") # TODO : debug <-- - args = shlex.split(cmd) - proc = subprocess.run( - args, - timeout=timeout_s, - encoding="utf8", - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - try: - proc.check_returncode() - except subprocess.CalledProcessError: - log.error(f"Last stdout: '{proc.stdout}'") - log.error(f"Last stderr: '{proc.stderr}'") - raise - out = proc.stdout - err = proc.stderr - if any(start in " ".join(args) for start in ("submit", 
"run")): - match = job_id_pattern.search(out) - if match: - job_id = match.group(1) - with SUBMITTED_JOBS_FILE.open("a") as f: - f.write(job_id + "\n") - out = out.strip() - err = err.strip() - return SysCap(out, err) From b4a39760ba5666414148bf41b7a9bdf1823a22f4 Mon Sep 17 00:00:00 2001 From: Artem Yushkovskiy Date: Fri, 6 Sep 2019 18:26:38 +0300 Subject: [PATCH 10/15] clenaup --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 133ff967..6d4723e6 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -23,7 +23,7 @@ jobs: # Download and cache dependencies - restore_cache: keys: - - v1-dependencies-{{ checksum "requirements-requirements-dev.txt" }} + - v1-dependencies-{{ checksum "requirements-dev.txt" }} # fallback to using the latest cache if no exact match is found - v1-dependencies- @@ -37,7 +37,7 @@ jobs: - save_cache: paths: - ./venv - key: v1-dependencies-{{ checksum "requirements-requirements-dev.txt" }} + key: v1-dependencies-{{ checksum "requirements-dev.txt" }} - run: name: run tests From 2490199f048e153c2337cccb1616e23ec9a48a2f Mon Sep 17 00:00:00 2001 From: Artem Yushkovskiy Date: Fri, 6 Sep 2019 18:28:05 +0300 Subject: [PATCH 11/15] fix --- neuro-shortcuts/neu.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neuro-shortcuts/neu.py b/neuro-shortcuts/neu.py index 06ea730b..15a824ac 100644 --- a/neuro-shortcuts/neu.py +++ b/neuro-shortcuts/neu.py @@ -1,6 +1,6 @@ PROJECT_NAME = "{{cookiecutter.project_slug}}" -CODE_PATH = "code" +CODE_PATH = PROJECT_NAME DATA_PATH = "data" NOTEBOOKS_PATH = "notebooks" REQUIREMENTS_PATH = "requirements" From d53ac957033749337777c9619af0eb5beaf7dc2b Mon Sep 17 00:00:00 2001 From: Artem Yushkovskiy Date: Fri, 6 Sep 2019 18:33:07 +0300 Subject: [PATCH 12/15] cleanup --- neuro-shortcuts/neu.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/neuro-shortcuts/neu.py 
b/neuro-shortcuts/neu.py index 15a824ac..1b82a00e 100644 --- a/neuro-shortcuts/neu.py +++ b/neuro-shortcuts/neu.py @@ -240,12 +240,14 @@ def run(cmd: str, check_return_code: bool = True) -> None: exit(proc.returncode) -def __main() -> None: +def main() -> None: from argparse import ArgumentParser import importlib import sys import typing as t + RESERVED_ACTION_NAMES = ("run", "main") + def collect_entrypoints(module_name: str) -> t.Dict[str, t.Callable[[], None]]: """ collect all callable object from module `module_name` whose names start with a lowerase english alphabet character @@ -254,7 +256,7 @@ def collect_entrypoints(module_name: str) -> t.Dict[str, t.Callable[[], None]]: module = sys.modules[module_name] result: t.Dict[str, t.Callable] = {} for name in dir(module): - if "a" <= name[0] <= "z": + if "a" <= name[0] <= "z" and name not in RESERVED_ACTION_NAMES: obj = getattr(module, name) if callable(obj): result[name] = obj @@ -285,7 +287,7 @@ def create_parser() -> ArgumentParser: return if not action: - print(f"ERROR: Cannot find action '{action_name}' in module '{module_name}'") + print(f"ERROR: Cannot find action '{action_name}'") print() exit(1) @@ -297,4 +299,4 @@ def create_parser() -> ArgumentParser: if __name__ == "__main__": - __main() + main() From fd897775cccaddc4acc5380020f423e68c481957 Mon Sep 17 00:00:00 2001 From: Artem Yushkovskiy Date: Fri, 6 Sep 2019 18:36:13 +0300 Subject: [PATCH 13/15] clenaup --- neuro-shortcuts/neu.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) mode change 100644 => 100755 neuro-shortcuts/neu.py diff --git a/neuro-shortcuts/neu.py b/neuro-shortcuts/neu.py old mode 100644 new mode 100755 index 1b82a00e..7c58109f --- a/neuro-shortcuts/neu.py +++ b/neuro-shortcuts/neu.py @@ -1,3 +1,5 @@ +#!/usr/bin/env python + PROJECT_NAME = "{{cookiecutter.project_slug}}" CODE_PATH = PROJECT_NAME @@ -214,8 +216,11 @@ def ps() -> None: # ############################# INTERNALS BELOW ############################## 
+RESERVED_ACTION_NAMES = ("run", "main") + def run(cmd: str, check_return_code: bool = True) -> None: + # local import so that only user-defined actions are declared in this file import shlex import subprocess import sys @@ -241,13 +246,12 @@ def run(cmd: str, check_return_code: bool = True) -> None: def main() -> None: + # local import so that only user-defined actions are declared in this file from argparse import ArgumentParser import importlib import sys import typing as t - RESERVED_ACTION_NAMES = ("run", "main") - def collect_entrypoints(module_name: str) -> t.Dict[str, t.Callable[[], None]]: """ collect all callable object from module `module_name` whose names start with a lowerase english alphabet character From 1cfea67edab4a6102530ff0b104f58b5f4ca4f3f Mon Sep 17 00:00:00 2001 From: Artem Yushkovskiy Date: Fri, 6 Sep 2019 18:51:04 +0300 Subject: [PATCH 14/15] cleanup! --- neuro-shortcuts/neu.py | 29 ++++++++++++----------------- 1 file changed, 12 insertions(+), 17 deletions(-) diff --git a/neuro-shortcuts/neu.py b/neuro-shortcuts/neu.py index 7c58109f..2f1da600 100755 --- a/neuro-shortcuts/neu.py +++ b/neuro-shortcuts/neu.py @@ -35,13 +35,13 @@ def setup(): - run(f"neuro kill {SETUP_NAME}", check_return_code=False) - cmd = ( + run(f"neuro kill {SETUP_NAME}", assert_success=False) + command = "sleep 1h" + run( f"neuro run --name {SETUP_NAME} --preset cpu-small --detach " f"--volume {PROJECT_PATH_STORAGE}:{PROJECT_PATH_ENV}:ro " - f"{BASE_ENV_NAME} 'tail -f /dev/null'" # TODO: must be 'sleep 1h' + f"{BASE_ENV_NAME} '{command}'" ) - run(cmd) run(f"neuro cp -r {REQUIREMENTS_PATH} {REQUIREMENTS_PATH_STORAGE}") # TODO: fix commands below # For some reason the second command fail @@ -49,21 +49,16 @@ def setup(): # neuro exec {SETUP_NAME} 'cat {REQUIREMENTS_PATH_ENV}/apt.txt | xargs apt-get install -y' # noqa run(f"neuro exec {SETUP_NAME} 'pip install -r {REQUIREMENTS_PATH_ENV}/pip.txt'") run(f"neuro job save {SETUP_NAME} {CUSTOM_ENV_NAME}") - run(f"neuro kill 
{SETUP_NAME}", check_return_code=False) - - -def test(): - run("neuro ls") + run(f"neuro kill {SETUP_NAME}", assert_success=False) # ##### STORAGE ##### def upload_code() -> None: - run(f"neuro cp -r -T {CODE_PATH} {CODE_PATH_ENV}") + run(f"neuro cp -r -T {CODE_PATH} {CODE_PATH_STORAGE}") -# TODO: redundant? clean where? locally? def clean_code() -> None: run(f"neuro rm -r {CODE_PATH_STORAGE}") @@ -103,7 +98,7 @@ def clean() -> None: # ##### JOBS ##### -def run_training() -> None: +def training() -> None: cmd = ( f"python {CODE_PATH_ENV}/train.py --log_dir " f"{RESULTS_PATH_ENV} --data_root {DATA_PATH_ENV}/cifar10" @@ -126,7 +121,7 @@ def connect_training() -> None: run(f"neuro exec {TRAINING_NAME} bash") -def run_jupyter() -> None: +def jupyter() -> None: cmd = ( f"jupyter notebook --no-browser --ip=0.0.0.0 --allow-root " f"--NotebookApp.token= --notebook-dir={NOTEBOOKS_PATH_ENV}" @@ -150,7 +145,7 @@ def kill_jupyter() -> None: run(f"neuro kill {JUPYTER_NAME}") -def run_tensorboard() -> None: +def tensorboard() -> None: cmd = f"tensorboard --logdir={RESULTS_PATH_ENV}" run( f"neuro run " @@ -168,7 +163,7 @@ def kill_tensorboard() -> None: run(f"neuro kill {TENSORBOARD_NAME}") -def run_filebrowser() -> None: +def filebrowser() -> None: run( f"neuro run " f"--name {FILEBROWSER_NAME} " @@ -219,7 +214,7 @@ def ps() -> None: RESERVED_ACTION_NAMES = ("run", "main") -def run(cmd: str, check_return_code: bool = True) -> None: +def run(cmd: str, assert_success: bool = True) -> None: # local import so that only user-defined actions are declared in this file import shlex import subprocess @@ -237,7 +232,7 @@ def run(cmd: str, check_return_code: bool = True) -> None: stderr=subprocess.PIPE, ) print(proc.stdout) - if check_return_code: + if assert_success: try: proc.check_returncode() except subprocess.CalledProcessError: From 9076cf90e5b7415c3430dd3956f2b738171689e7 Mon Sep 17 00:00:00 2001 From: Artem Yushkovskiy Date: Mon, 9 Sep 2019 15:26:43 +0300 Subject: [PATCH 15/15] 
implement via invoke --- neuro-shortcuts/Makefile | 2 +- neuro-shortcuts/invoke.yaml | 4 + neuro-shortcuts/neu.py | 301 ------------------------------------ neuro-shortcuts/tasks.py | 242 +++++++++++++++++++++++++++++ 4 files changed, 247 insertions(+), 302 deletions(-) create mode 100644 neuro-shortcuts/invoke.yaml delete mode 100755 neuro-shortcuts/neu.py create mode 100755 neuro-shortcuts/tasks.py diff --git a/neuro-shortcuts/Makefile b/neuro-shortcuts/Makefile index e937b85f..a31ae1a3 100644 --- a/neuro-shortcuts/Makefile +++ b/neuro-shortcuts/Makefile @@ -1,4 +1,4 @@ -ISORT_DIRS := neu.py setup.py +ISORT_DIRS := tasks.py setup.py BLACK_DIRS := $(ISORT_DIRS) MYPY_DIRS := tests diff --git a/neuro-shortcuts/invoke.yaml b/neuro-shortcuts/invoke.yaml new file mode 100644 index 00000000..9944a540 --- /dev/null +++ b/neuro-shortcuts/invoke.yaml @@ -0,0 +1,4 @@ +debug: false +run: + echo: true + color: true \ No newline at end of file diff --git a/neuro-shortcuts/neu.py b/neuro-shortcuts/neu.py deleted file mode 100755 index 2f1da600..00000000 --- a/neuro-shortcuts/neu.py +++ /dev/null @@ -1,301 +0,0 @@ -#!/usr/bin/env python - -PROJECT_NAME = "{{cookiecutter.project_slug}}" - -CODE_PATH = PROJECT_NAME -DATA_PATH = "data" -NOTEBOOKS_PATH = "notebooks" -REQUIREMENTS_PATH = "requirements" -RESULTS_PATH = "results" -PROJECT_PATH_STORAGE = f"storage:{PROJECT_NAME}" -CODE_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{CODE_PATH}" -DATA_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{DATA_PATH}" -NOTEBOOKS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{NOTEBOOKS_PATH}" -REQUIREMENTS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{REQUIREMENTS_PATH}" -RESULTS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{RESULTS_PATH}" - -PROJECT_PATH_ENV = "/project" -CODE_PATH_ENV = f"{PROJECT_PATH_ENV}/{CODE_PATH}" -DATA_PATH_ENV = f"{PROJECT_PATH_ENV}/{DATA_PATH}" -NOTEBOOKS_PATH_ENV = f"{PROJECT_PATH_ENV}/{NOTEBOOKS_PATH}" -REQUIREMENTS_PATH_ENV = f"{PROJECT_PATH_ENV}/{REQUIREMENTS_PATH}" -RESULTS_PATH_ENV = 
f"{PROJECT_PATH_ENV}/{RESULTS_PATH}" - -SETUP_NAME = "setup" -TRAINING_NAME = "training" -JUPYTER_NAME = "jupyter" -TENSORBOARD_NAME = "tensorboard" -FILEBROWSER_NAME = "filebrowser" - -BASE_ENV_NAME = "image:neuro/base" -CUSTOM_ENV_NAME = "image:neuro/custom" - - -# ##### SETUP ##### - - -def setup(): - run(f"neuro kill {SETUP_NAME}", assert_success=False) - command = "sleep 1h" - run( - f"neuro run --name {SETUP_NAME} --preset cpu-small --detach " - f"--volume {PROJECT_PATH_STORAGE}:{PROJECT_PATH_ENV}:ro " - f"{BASE_ENV_NAME} '{command}'" - ) - run(f"neuro cp -r {REQUIREMENTS_PATH} {REQUIREMENTS_PATH_STORAGE}") - # TODO: fix commands below - # For some reason the second command fail - # neuro exec {SETUP_NAME} 'apt-get update' - # neuro exec {SETUP_NAME} 'cat {REQUIREMENTS_PATH_ENV}/apt.txt | xargs apt-get install -y' # noqa - run(f"neuro exec {SETUP_NAME} 'pip install -r {REQUIREMENTS_PATH_ENV}/pip.txt'") - run(f"neuro job save {SETUP_NAME} {CUSTOM_ENV_NAME}") - run(f"neuro kill {SETUP_NAME}", assert_success=False) - - -# ##### STORAGE ##### - - -def upload_code() -> None: - run(f"neuro cp -r -T {CODE_PATH} {CODE_PATH_STORAGE}") - - -def clean_code() -> None: - run(f"neuro rm -r {CODE_PATH_STORAGE}") - - -def upload_data() -> None: - run(f"neuro storage load -p -u -T {DATA_PATH} {DATA_PATH_STORAGE}") - - -def clean_data() -> None: - run(f"neuro rm -r {DATA_PATH_STORAGE}") - - -def upload_notebooks() -> None: - run(f"neuro cp -r -T {NOTEBOOKS_PATH} {NOTEBOOKS_PATH_STORAGE}") - - -def download_notebooks() -> None: - run(f"neuro cp -r {NOTEBOOKS_PATH_STORAGE} {NOTEBOOKS_PATH}") - - -def clean_notebooks() -> None: - run(f"neuro rm -r {NOTEBOOKS_PATH_STORAGE}") - - -def upload() -> None: - upload_code() - upload_data() - upload_notebooks() - - -def clean() -> None: - clean_code() - clean_data() - clean_notebooks() - - -# ##### JOBS ##### - - -def training() -> None: - cmd = ( - f"python {CODE_PATH_ENV}/train.py --log_dir " - f"{RESULTS_PATH_ENV} --data_root 
{DATA_PATH_ENV}/cifar10" - ) - run( - f"neuro run --name {TRAINING_NAME} --preset gpu-small " - f"--volume {DATA_PATH_STORAGE}:{DATA_PATH_ENV}:ro " - f"--volume {CODE_PATH_STORAGE}:{CODE_PATH_ENV}:ro " - f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:rw " - f"{CUSTOM_ENV_NAME} " - f"'{cmd}'" - ) - - -def kill_training() -> None: - run(f"neuro kill {TRAINING_NAME}") - - -def connect_training() -> None: - run(f"neuro exec {TRAINING_NAME} bash") - - -def jupyter() -> None: - cmd = ( - f"jupyter notebook --no-browser --ip=0.0.0.0 --allow-root " - f"--NotebookApp.token= --notebook-dir={NOTEBOOKS_PATH_ENV}" - ) - run( - f"neuro run " - f"--name {JUPYTER_NAME} " - f"--preset gpu-small " - f"--http 8888 --no-http-auth --detach " - f"--volume {DATA_PATH_STORAGE}:{DATA_PATH_ENV}:ro " - f"--volume {CODE_PATH_STORAGE}:{CODE_PATH_ENV}:rw " - f"--volume {NOTEBOOKS_PATH_STORAGE}:{NOTEBOOKS_PATH_ENV}:rw " - f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:rw " - f"{CUSTOM_ENV_NAME} " - f"'{cmd}'" - ) - run(f"neuro job browse {JUPYTER_NAME}") - - -def kill_jupyter() -> None: - run(f"neuro kill {JUPYTER_NAME}") - - -def tensorboard() -> None: - cmd = f"tensorboard --logdir={RESULTS_PATH_ENV}" - run( - f"neuro run " - f"--name {TENSORBOARD_NAME} " - f"--preset cpu-small " - f"--http 6006 --no-http-auth --detach " - f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:ro " - f"{CUSTOM_ENV_NAME} " - f"'{cmd}'" - ) - run(f"neuro job browse {TENSORBOARD_NAME}") - - -def kill_tensorboard() -> None: - run(f"neuro kill {TENSORBOARD_NAME}") - - -def filebrowser() -> None: - run( - f"neuro run " - f"--name {FILEBROWSER_NAME} " - f"--preset cpu-small " - f"--http 80 --no-http-auth --detach " - f"--volume {PROJECT_PATH_STORAGE}:/srv:rw " - f"filebrowser/filebrowser" - ) - run(f"neuro job browse {FILEBROWSER_NAME}") - - -def kill_filebrowser() -> None: - run(f"neuro kill {FILEBROWSER_NAME}") - - -def kill() -> None: - kill_training() - kill_jupyter() - kill_tensorboard() - 
kill_filebrowser() - - -# ##### LOCAL ##### - - -def setup_local() -> None: - run("pip install -r requirements/pip.txt") - - -def lint() -> None: - run("flake8 .") - run("mypy .") - - -def install() -> None: - run("python setup.py install --user") - - -# ##### MISC ##### - - -def ps() -> None: - run(f"neuro ps") - - -# ############################# INTERNALS BELOW ############################## - -RESERVED_ACTION_NAMES = ("run", "main") - - -def run(cmd: str, assert_success: bool = True) -> None: - # local import so that only user-defined actions are declared in this file - import shlex - import subprocess - import sys - - DEFAULT_TIMEOUT = 5 * 60 - - print(f"Runing command: '{cmd}'") - args = shlex.split(cmd) - proc = subprocess.run( - args, - timeout=DEFAULT_TIMEOUT, - encoding="utf8", - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - print(proc.stdout) - if assert_success: - try: - proc.check_returncode() - except subprocess.CalledProcessError: - sys.stderr.write(f"ERROR: {proc.stderr}") - exit(proc.returncode) - - -def main() -> None: - # local import so that only user-defined actions are declared in this file - from argparse import ArgumentParser - import importlib - import sys - import typing as t - - def collect_entrypoints(module_name: str) -> t.Dict[str, t.Callable[[], None]]: - """ collect all callable object from module `module_name` - whose names start with a lowerase english alphabet character - """ - importlib.import_module(module_name) - module = sys.modules[module_name] - result: t.Dict[str, t.Callable] = {} - for name in dir(module): - if "a" <= name[0] <= "z" and name not in RESERVED_ACTION_NAMES: - obj = getattr(module, name) - if callable(obj): - result[name] = obj - return result - - def create_parser() -> ArgumentParser: - parser = ArgumentParser(description="Neuromation shortcuts script") - parser.add_argument( - "action", - metavar="ACTION", - type=str, - help="Shortcut action (`help` for help)", - ) - return parser - - parser = 
create_parser() - args = parser.parse_args() - action_name = args.action - - module_name = __name__ - entrypoints = collect_entrypoints(module_name) - action = entrypoints.get(action_name) - - help_message = f"Available actions: {', '.join(entrypoints.keys())}" - if action_name == "help": - # TODO: add help messages - print(help_message) - return - - if not action: - print(f"ERROR: Cannot find action '{action_name}'") - print() - exit(1) - - try: - action() - except (TypeError, AttributeError) as e: - sys.stderr.write(f"ERROR: Could not execute action '{action_name}': {e}") - exit(2) - - -if __name__ == "__main__": - main() diff --git a/neuro-shortcuts/tasks.py b/neuro-shortcuts/tasks.py new file mode 100755 index 00000000..0d3987a2 --- /dev/null +++ b/neuro-shortcuts/tasks.py @@ -0,0 +1,242 @@ +#!/usr/bin/env python + +from invoke import task as task + + +PROJECT_NAME = "{{cookiecutter.project_slug}}" + +CODE_PATH = PROJECT_NAME +DATA_PATH = "data" +NOTEBOOKS_PATH = "notebooks" +REQUIREMENTS_PATH = "requirements" +RESULTS_PATH = "results" +PROJECT_PATH_STORAGE = f"storage:{PROJECT_NAME}" +CODE_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{CODE_PATH}" +DATA_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{DATA_PATH}" +NOTEBOOKS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{NOTEBOOKS_PATH}" +REQUIREMENTS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{REQUIREMENTS_PATH}" +RESULTS_PATH_STORAGE = f"{PROJECT_PATH_STORAGE}/{RESULTS_PATH}" + +PROJECT_PATH_ENV = "/project" +CODE_PATH_ENV = f"{PROJECT_PATH_ENV}/{CODE_PATH}" +DATA_PATH_ENV = f"{PROJECT_PATH_ENV}/{DATA_PATH}" +NOTEBOOKS_PATH_ENV = f"{PROJECT_PATH_ENV}/{NOTEBOOKS_PATH}" +REQUIREMENTS_PATH_ENV = f"{PROJECT_PATH_ENV}/{REQUIREMENTS_PATH}" +RESULTS_PATH_ENV = f"{PROJECT_PATH_ENV}/{RESULTS_PATH}" + +SETUP_NAME = "setup" +TRAINING_NAME = "training" +JUPYTER_NAME = "jupyter" +TENSORBOARD_NAME = "tensorboard" +FILEBROWSER_NAME = "filebrowser" + +BASE_ENV_NAME = "image:neuro/base" +CUSTOM_ENV_NAME = "image:neuro/custom" + + +# ##### SETUP 
##### + + +@task +def help(context): + context.run("invoke --list") + + +@task +def setup(context): + """ This is documentation for setup + """ + context.run(f"neuro kill {SETUP_NAME}") + command = "sleep 1h" + context.run( + f"neuro run --name {SETUP_NAME} --preset cpu-small --detach " + f"--volume {PROJECT_PATH_STORAGE}:{PROJECT_PATH_ENV}:ro " + f"{BASE_ENV_NAME} '{command}'" + ) + context.run(f"neuro cp -r {REQUIREMENTS_PATH} {REQUIREMENTS_PATH_STORAGE}") + # TODO: fix commands below + # For some reason the second command fail + # neuro exec {SETUP_NAME} 'apt-get update' + # neuro exec {SETUP_NAME} 'cat {REQUIREMENTS_PATH_ENV}/apt.txt | xargs apt-get install -y' # noqa + context.run( + f"neuro exec {SETUP_NAME} 'pip install -r {REQUIREMENTS_PATH_ENV}/pip.txt'" + ) + context.run(f"neuro job save {SETUP_NAME} {CUSTOM_ENV_NAME}") + context.run(f"neuro kill {SETUP_NAME}") + + +# ##### STORAGE ##### + + +@task +def upload_code(context): + context.run(f"neuro cp -r -T {CODE_PATH} {CODE_PATH_STORAGE}") + + +@task +def clean_code(context): + context.run(f"neuro rm -r {CODE_PATH_STORAGE}") + + +@task +def upload_data(context): + context.run(f"neuro storage load -p -u -T {DATA_PATH} {DATA_PATH_STORAGE}") + + +@task +def clean_data(context): + context.run(f"neuro rm -r {DATA_PATH_STORAGE}") + + +@task +def upload_notebooks(context): + context.run(f"neuro cp -r -T {NOTEBOOKS_PATH} {NOTEBOOKS_PATH_STORAGE}") + + +@task +def download_notebooks(context): + context.run(f"neuro cp -r {NOTEBOOKS_PATH_STORAGE} {NOTEBOOKS_PATH}") + + +@task +def clean_notebooks(context): + context.run(f"neuro rm -r {NOTEBOOKS_PATH_STORAGE}") + + +@task +def upload(context): + upload_code() + upload_data() + upload_notebooks() + + +@task +def clean(context): + clean_code() + clean_data() + clean_notebooks() + + +# ##### JOBS ##### + + +@task +def training(context): + cmd = ( + f"python {CODE_PATH_ENV}/train.py --log_dir " + f"{RESULTS_PATH_ENV} --data_root {DATA_PATH_ENV}/cifar10" + ) + context.run( 
+ f"neuro context.run --name {TRAINING_NAME} --preset gpu-small " + f"--volume {DATA_PATH_STORAGE}:{DATA_PATH_ENV}:ro " + f"--volume {CODE_PATH_STORAGE}:{CODE_PATH_ENV}:ro " + f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:rw " + f"{CUSTOM_ENV_NAME} " + f"'{cmd}'" + ) + + +@task +def kill_training(context): + context.run(f"neuro kill {TRAINING_NAME}") + + +@task +def connect_training(context): + context.run(f"neuro exec {TRAINING_NAME} bash") + + +@task +def jupyter(context): + cmd = ( + f"jupyter notebook --no-browser --ip=0.0.0.0 --allow-root " + f"--NotebookApp.token= --notebook-dir={NOTEBOOKS_PATH_ENV}" + ) + context.run( + f"neuro context.run " + f"--name {JUPYTER_NAME} " + f"--preset gpu-small " + f"--http 8888 --no-http-auth --detach " + f"--volume {DATA_PATH_STORAGE}:{DATA_PATH_ENV}:ro " + f"--volume {CODE_PATH_STORAGE}:{CODE_PATH_ENV}:rw " + f"--volume {NOTEBOOKS_PATH_STORAGE}:{NOTEBOOKS_PATH_ENV}:rw " + f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:rw " + f"{CUSTOM_ENV_NAME} " + f"'{cmd}'" + ) + context.run(f"neuro job browse {JUPYTER_NAME}") + + +@task +def kill_jupyter(context): + context.run(f"neuro kill {JUPYTER_NAME}") + + +@task +def tensorboard(context): + cmd = f"tensorboard --logdir={RESULTS_PATH_ENV}" + context.run( + f"neuro context.run " + f"--name {TENSORBOARD_NAME} " + f"--preset cpu-small " + f"--http 6006 --no-http-auth --detach " + f"--volume {RESULTS_PATH_STORAGE}:{RESULTS_PATH_ENV}:ro " + f"{CUSTOM_ENV_NAME} " + f"'{cmd}'" + ) + context.run(f"neuro job browse {TENSORBOARD_NAME}") + + +@task +def kill_tensorboard(context): + context.run(f"neuro kill {TENSORBOARD_NAME}") + + +@task +def filebrowser(context): + context.run( + f"neuro context.run " + f"--name {FILEBROWSER_NAME} " + f"--preset cpu-small " + f"--http 80 --no-http-auth --detach " + f"--volume {PROJECT_PATH_STORAGE}:/srv:rw " + f"filebrowser/filebrowser" + ) + context.run(f"neuro job browse {FILEBROWSER_NAME}") + + +@task +def kill_filebrowser(context): + 
context.run(f"neuro kill {FILEBROWSER_NAME}") + + +@task(pre=[kill_training, kill_jupyter, kill_tensorboard, kill_filebrowser]) +def kill(context): + pass + + +# ##### LOCAL ##### + + +@task +def setup_local(context): + context.run("pip install -r requirements/pip.txt") + + +@task +def lint(context): + context.run("flake8 .") + context.run("mypy .") + + +@task +def install(context): + context.run("python setup.py install --user") + + +# ##### MISC ##### + + +@task +def ps(context): + context.run(f"neuro ps")