## Compose runtime parametrization through env vars
# If NST_HOST_MOUNT is set to a valid host path, the container mounts that path
# as its demo-output directory; otherwise ./nst-algo-output is used.
version: '3'
services:
  # Example usage:
  #   mkdir art
  #   NST_HOST_MOUNT="$PWD/art" docker-compose up
  nst_demo:
    image: boromir674/neural-style-transfer:1.0.2
    volumes:
      - ${NST_HOST_MOUNT:-./nst-algo-output}:/app/demo-output
    command: [ "demo" ]
  ### OPT 1
  ## NST Service, where the Pretrained Model Weights are made available to it (so that
  # the code can read them) at build time (at docker build).
  # nst-service:
  #   build:
  #     context: ./
  #     target: prod_ready
  #     dockerfile: Dockerfile
  #     # At build time we can override the default host location from which the builder
  #     # copies the model weights into the image.
  #     args:
  #       - IMAGE_MODEL=./pretrained_model_bundle/imagenet-vgg-verydeep-19.mat
  #   ports:
  #     - "5000:5000"
  #   volumes:
  #     - ./gen-images:/app/gen-images
  #   networks:
  #     - nst-network
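  # A hedged sketch for OPT 1 (assuming the service above is uncommented and the
  # Dockerfile declares the IMAGE_MODEL build arg): the host location of the weights
  # could also be overridden from the CLI at build time, e.g.
  #   docker-compose build --build-arg IMAGE_MODEL=./pretrained_model_bundle/imagenet-vgg-verydeep-19.mat nst-service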
  ### OPT 2
  ## NST Service, where the Pretrained Model Weights are made available to it (so that
  # the code can read them) at runtime (at docker run -e).
  # So, at runtime, when we spin up the container, we mount a directory from the local
  # filesystem, where the model weights are stored, to a directory inside the container.
  # We also make sure that at runtime we set the AA_VGG_19 variable to the path of the
  # model weights inside the container's filesystem. That way, the code can read the
  # model weights from the container's filesystem.
  # Note that changes in mounted Volumes have a two-way effect: local and in-container.
  # In other words, the directory of the model weights is effectively "live".
  # That might be an unwanted side-effect, but it is not a problem for this project.
  # nst-service:
  #   build:
  #     context: .
  #     target: prod_install
  #     dockerfile: Dockerfile
  #   environment:
  #     AA_VGG_19: pretrained_model_bundle/imagenet-vgg-verydeep-19.mat
  #   volumes:
  #     - ./pretrained_model_bundle:/app/pretrained_model_bundle
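  # A hedged usage sketch for OPT 2 (assuming the service above is uncommented and the
  # weights file lives in ./pretrained_model_bundle on the host): the variable can also
  # be overridden per run, e.g.
  #   docker-compose run -e AA_VGG_19=/app/pretrained_model_bundle/imagenet-vgg-verydeep-19.mat nst-service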
  # Example usage: mkdir art && NST_HOST_MOUNT="$PWD/art" docker-compose up
  # nst_demo_dev:
  #   build:
  #     context: ./
  #     # target: default_with_demo
  #     dockerfile: Dockerfile
  #     # OR just: build: .
  #   volumes:
  #     - ${NST_HOST_MOUNT:-./nst-algo-output}:/app/demo-output
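  # A hedged usage sketch for the dev service (assuming it is uncommented): build the
  # image locally and bring the container up in one step, e.g.
  #   mkdir -p art && NST_HOST_MOUNT="$PWD/art" docker-compose up --build nst_demo_dev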