########################################################################################
# Include this to get access to a common set of rules for reuse in Makefile-based projects.
# include ../../.make.defaults
#
# Before including, the following must be defined:
# REPOROOT points to the top of the git repository.
# For example REPOROOT=../../..
# DOCKER_IMAGE_NAME - defines the name of the docker image
# For example DOCKER_IMAGE_NAME=noop
# EXTRA_INDEX_URL - can define an additional pypi index for use
# in creating venv and images. This will be used on pip installs
# when building the venv and will be made available as
# a docker build argument when building the docker image.
# This is useful when the repo is used in conjunction with an
# in-house or private pypi.
#
# Targets defined here use double colons so they can be overridden
#
# Reusable rules begin with '.'. To reuse without modification, for example,
# define your local Makefile rule as follows:
# clean: .clean
# To augment the clean rule:
# clean: .clean
# rm -rf other-stuff
#######################################################################################
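# A minimal, illustrative consuming Makefile (names below are hypothetical and only
# sketch the expected shape; real projects may wire up different .defaults.* targets):
#   REPOROOT=../..
#   DOCKER_IMAGE_NAME=my-transform
#   include $(REPOROOT)/.make.defaults
#   clean:: .defaults.clean
#   venv:: .defaults.venv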
SHELL=/bin/bash
include $(REPOROOT)/.make.versions
# Command to run python
PYTHON?=python
PIP=$(PYTHON) -m pip
# Command to run pytest
PYTEST=pytest -s
PYTHON_VERSION=$(shell $(PYTHON) --version)
ABS_REPOROOT=$(shell (cd $(REPOROOT); pwd))
DOCKER_FILE?=Dockerfile
#DOCKER_IMAGE_NAME?=xyzzy # Must be defined by the including makefile
DOCKER?=docker
DOCKER_PLATFORM?=linux/amd64
# Can be used by transforms or others to add args to the "docker build" command in .defaults.image target
DOCKER_BUILD_EXTRA_ARGS?=
# Can be used by transforms or others to add args to the "pip install" commands referencing toml or requirements.txt files
PIP_INSTALL_EXTRA_ARGS?=
DOCKER_HOSTNAME?=quay.io
DOCKER_NAMESPACE ?= dataprep1/data-prep-kit
DOCKER_REGISTRY_USER?=$(DPK_DOCKER_REGISTRY_USER)
DOCKER_REGISTRY_KEY?=$(DPK_DOCKER_REGISTRY_KEY)
DOCKER_REGISTRY_ENDPOINT?=$(DOCKER_HOSTNAME)/$(DOCKER_NAMESPACE)
DOCKER_LOCAL_IMAGE=$(DOCKER_IMAGE_NAME):$(DOCKER_IMAGE_VERSION)
DOCKER_REMOTE_IMAGE=$(DOCKER_REGISTRY_ENDPOINT)/$(DOCKER_IMAGE_NAME):$(DOCKER_IMAGE_VERSION)
DOCKER_SPARK_BASE_IMAGE_NAME=data-prep-kit-spark-$(SPARK_VERSION)
DOCKER_SPARK_BASE_IMAGE=$(DOCKER_SPARK_BASE_IMAGE_NAME):$(DOCKER_IMAGE_VERSION)
RAY_BASE_IMAGE?=docker.io/rayproject/ray:${RAY}-py310
# Deprecated in favor of DOCKER_REMOTE_IMAGE
DOCKER_IMAGE?=$(DOCKER_REMOTE_IMAGE)
KIND_CLUSTER_NAME=dataprep
ARTIFACTS_DIR?=$(REPOROOT)/artifacts
# Set this to 0 to disable installation of data-processing-lib/* into virtual environments
# Should cause data-prep-kit dependencies to be loaded from pypi instead.
USE_REPO_LIB_SRC?=1
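# For example (illustrative), to build a venv against published pypi packages instead:
#   make USE_REPO_LIB_SRC=0 venv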
DPK_PYTHON_LIB_DIR=$(REPOROOT)/data-processing-lib/python
DPK_RAY_LIB_DIR=$(REPOROOT)/data-processing-lib/ray
DPK_SPARK_LIB_DIR=$(REPOROOT)/data-processing-lib/spark
DPK_PYTHON_BUILD_DIR=$(REPOROOT)/data-processing-lib
DPK_RAY_BUILD_DIR=$(REPOROOT)/data-processing-lib
DPK_SPARK_BUILD_DIR=$(REPOROOT)/data-processing-lib
KFPv2?=0
#######################################################################################
# Lists all targets and optional help text found in the target.
# Adapted from https://stackoverflow.com/a/65243296/45375
# and modified to support help at the top or middle of the recursive tree and at
# the leaves (the head and tail piece).
.PHONY: help
.newhelp::
# Expects each documented rule to contain a line of the form: @# Help: <description>
@printf "%-15s %s\n" "Target" "Description"
@printf "%-15s %s\n" "--------------" "------------------------"
@make -pqR : 2>/dev/null | grep '@# Help:' | grep -v make | grep -v IGNORE | sed -e 's/.*Help://' -e 's/[ ]*//' | \
awk -F, '{printf("%-15s%s\n",$$1,$$2)}'
# Gnu awk requires running awk in 2 steps, below.
help::
@printf "%-20s %s\n" "Target" "Description"
@printf "%-20s %s\n" "------" "-----------"
@export submakes=$$(find ./* -mindepth 1 -maxdepth 1 -name Makefile); \
make -pqR : 2>/dev/null \
| awk '/^# File/,/^# Finished Make data base/ {print $$0}' \
| awk -v RS= -F: '{if ($$1 !~ "^[#.]") {print $$1}}' \
| sort \
| egrep -v -e '^[^[:alnum:]]' -e '^$@$$' \
| xargs -I _ sh -c 'printf "%-20s " _; make _ -nB 2>/dev/null | (grep -i "^# Help:" || echo "") \
| if [ -z "$$submakes" ]; then \
tail -1; \
else \
head -1; \
fi \
| sed "s/^# Help: //g"' | uniq
#################################################################################################################
#
# This provides common support for all Makefiles in the project.
# It enables the processing of a common set of rules on all sub-projects underneath this directory.
# Currently, the common/standardized set of rules is as follows and is supported by this include file:
#
# clean:
# setup:
# build:
# test:
#
# When finally getting to a makefile that requires a rule implementation, for example to test the build,
# that makefile should override/implement the rule to meet its needs. Such a rule may continue to recurse
# using "$(MAKE) <rule>-recurse", for example "$(MAKE) test-recurse".
#
# Each rule is called recursively on sub-directories and if a similar inclusion is done in the sub-Makefiles,
# the rules will be applied/executed recursively in their sub-directories. By default, sub-directories
# are traversed in lexicographic order. If you need to control the ordering, create a .make.subdirs
# file listing the sub-directories to process, in order.
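# An illustrative .make.subdirs (directory names are hypothetical; one directory per
# line, '#' comment lines are ignored):
#   python
#   ray
#   spark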
# Rule to recurse into subdirectories.
# Usage: $(MAKE) RULE=yourrule .recurse
.PHONY: .recurse
.recurse::
@if [ -z "$(RULE)" ]; then \
echo RULE must be specified to use the .recurse target; \
exit 1; \
fi
@if [ -e .make.subdirs ]; then \
SUB_MAKE_DIRS=$$(cat .make.subdirs | sed -e 's/^#.*//' -e 's/ //g') ; \
if [ -z "$$SUB_MAKE_DIRS" ]; then \
SUB_MAKE_DIRS=$$(echo */ | sort); \
fi; \
else \
SUB_MAKE_DIRS=$$(echo */ | sort); \
fi; \
echo SUB_MAKE_DIRS=$$SUB_MAKE_DIRS; \
if [ ! -z "$$SUB_MAKE_DIRS" ]; then \
for i in $$SUB_MAKE_DIRS; do \
if [ -e $$i/Makefile ]; then \
echo Using recursive $(RULE) rule in $$i; \
(cd $$i; $(MAKE) $(RULE)); \
status=$$?; \
if [ $$status -ne 0 ]; then \
exit $$status; \
fi; \
else \
echo No Makefile found in $$i. Skipping.; \
fi; \
done; \
fi
PWD=$(shell pwd)
# This rule allows sub-directories to override/define rule X for each X-recurse rule above.
# See https://stackoverflow.com/questions/11958626/make-file-warning-overriding-commands-for-target
%:: .%-recurse
@echo Finished executing rule $@ recursively in $(PWD) > /dev/null
#######################################################################################
check_defined = \
$(strip $(foreach 1,$1, \
$(call __check_defined,$1,$(strip $(value 2)))))
__check_defined = \
$(if $(value $1),, \
$(error Undefined $1$(if $2, ($2))))
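# Example usage (taken from the .defaults.image target below):
#   $(call check_defined, DOCKER_IMAGE_NAME)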
.PHONY: .check_python_version
.check_python_version::
@version=$$(echo $(PYTHON_VERSION) | sed -e 's/Python[ ]*//'); \
major=$$(echo $$version | awk -F. '{print $$1}'); \
minor=$$(echo $$version | awk -F. '{print $$2}'); \
if [ $$major -lt 3 -o $$minor -lt 10 -o $$minor -gt 12 ]; then \
echo Python 3.10 - 3.12 is required, but got $$version; \
echo Try overriding PYTHON=$(PYTHON). For example, "'"make PYTHON=python3.10" ...'"; \
exit 1; \
fi
.PHONY: .defaults.clean
.defaults.clean::
@# Help: Clean up the virtual environment.
rm -rf venv > /dev/null 2>&1
# These are created during image builds and may be left over if a build is killed in the middle
-rm -rf data-processing-lib-python > /dev/null 2>&1
-rm -rf data-processing-lib-ray > /dev/null 2>&1
-rm -rf data-processing-lib-spark > /dev/null 2>&1
# Left over python stuff
-find . -name '*.egg-info' | xargs rm -rf
-find . -name '__pycache__' | xargs rm -rf
-rm -rf dist
-rm -rf $(ARTIFACTS_DIR) > /dev/null 2>&1
.PHONY: .defaults.docker-save-image
.defaults.docker-save-image:
@# Help: Save docker image as tar file to ${ARTIFACTS_DIR} directory.
mkdir -p ${ARTIFACTS_DIR}/
docker save -o ${ARTIFACTS_DIR}/${DOCKER_IMAGE_NAME}.tar $(DOCKER_REMOTE_IMAGE)
.PHONY: .defaults.docker-load-image
.defaults.docker-load-image:
@# Help: load docker image from ${ARTIFACTS_DIR} directory.
docker load -i ${ARTIFACTS_DIR}/${DOCKER_IMAGE_NAME}.tar
# We create both local and remote tags. Local seems to be needed when using our spark
# base image. Remote seems to be needed by kfp.
.PHONY: .defaults.image
.defaults.image:: # Must be called with a DOCKER_IMAGE= setting.
@# Help: Create the docker image $(DOCKER_LOCAL_IMAGE) and a tag for $(DOCKER_REMOTE_IMAGE)
$(call check_defined, DOCKER_IMAGE_NAME)
# The following touch seems to be needed to work around a docker build problem in which
# it seemed to be using a pyproject.toml from a previously built image. Specifically, the malware
# transform's ray pyproject.toml seemed to be copied into the image instead of the proglang_select transform's python version.
# This could not be reproduced locally and was only seen when running as the build-image.yml workflow
#if [ -e pyproject.toml ]; then \
# cat pyproject.toml; \
#fi
if [ -e pyproject.toml ]; then \
touch pyproject.toml; \
fi
$(DOCKER) build -t $(DOCKER_LOCAL_IMAGE) $(DOCKER_BUILD_EXTRA_ARGS) \
--platform $(DOCKER_PLATFORM) \
--build-arg EXTRA_INDEX_URL=$(EXTRA_INDEX_URL) \
--build-arg BASE_IMAGE=$(BASE_IMAGE) \
--build-arg DPK_WHEEL_FILE_NAME=$(DPK_WHEEL_FILE_NAME) \
--build-arg BUILD_DATE=$(shell date -u +'%Y-%m-%dT%H:%M:%SZ') \
--build-arg GIT_COMMIT=$(shell git log -1 --format=%h) .
$(DOCKER) tag $(DOCKER_LOCAL_IMAGE) $(DOCKER_REMOTE_IMAGE)
# Copy a source tree from LIB_PATH (src, pyproject.toml, README.md and any requirements.txt) to LIB_NAME
# Generally used to copy source from within the repo into a local directory for use by a Dockerfile
.PHONY: .defaults.copy-lib
.defaults.copy-lib:
rm -rf ${LIB_NAME}
mkdir ${LIB_NAME}
cp -p -R ${LIB_PATH}/src ${LIB_NAME}
cp -p -R ${LIB_PATH}/pyproject.toml ${LIB_NAME}
cp -p -R ${LIB_PATH}/README.md ${LIB_NAME}
if [ -e ${LIB_PATH}/requirements.txt ]; then \
cp -p ${LIB_PATH}/requirements.txt ${LIB_NAME}; \
fi
# Build an image using the local Dockerfile and make the data-processing-lib/python
# available in the current directory for use by the Dockerfile (i.e. to install the library).
#.PHONY: .defaults.python-lib-src-image
#.defaults.python-lib-src-image:: # Must be called with a DOCKER_LOCAL_IMAGE= settings.
# @# Help: Build the Python $(DOCKER_LOCAL_IMAGE) using the $(DOCKER_FILE), requirements.txt and data-processing-lib/python source
#ifeq ($(USE_REPO_LIB_SRC), 1)
# $(MAKE) LIB_PATH=$(DPK_PYTHON_LIB_DIR) LIB_NAME=data-processing-lib-python .defaults.copy-lib
#endif
# $(MAKE) DOCKER_IMAGE=$(DOCKER_LOCAL_IMAGE) .defaults.image
# -rm -rf data-processing-lib-python
.PHONY: .default.build-lib-wheel
.default.build-lib-wheel:
make -C $(REPOROOT)/data-processing-lib build-pkg-dist
rm -rf data-processing-dist && mkdir data-processing-dist
cp $(REPOROOT)/data-processing-lib/dist/*.whl data-processing-dist
# Build an image using the local Dockerfile and make the wheel for data-processing-lib
# available in the current directory for use by the Dockerfile (i.e. to install the library).
.PHONY: .defaults.python-lib-whl-image
.defaults.python-lib-whl-image:: .default.build-lib-wheel
# Must be called with a DOCKER_LOCAL_IMAGE= setting.
@# Help: Build the Python $(DOCKER_LOCAL_IMAGE) using the wheel file for the library
@$(eval LIB_WHEEL_FILE := $(shell find data-processing-dist/*.whl))
$(eval LIB_WHEEL_FILE := $(shell basename $(LIB_WHEEL_FILE)))
$(MAKE) DOCKER_IMAGE=$(DOCKER_LOCAL_IMAGE) DPK_WHEEL_FILE_NAME=$(LIB_WHEEL_FILE) .defaults.image
-rm -rf data-processing-dist
# Build an image using the local Dockerfile and make the data-processing-lib/ray
# available in the current directory for use by the Dockerfile (i.e. to install the library).
# Note that this looks for the ../python directory, which is currently only used in the transform projects,
# but we add it here as a convenience to avoid duplicating a lot of this in transforms/.make.transforms.
#.PHONY: .defaults.ray-lib-src-image
#.defaults.ray-lib-src-image:: # Must be called with a DOCKER_LOCAL_IMAGE= settings.
# @# Help: Build the Ray $(DOCKER_LOCAL_IMAGE) using the $(DOCKER_FILE), requirements.txt and data-processing-libs source
#ifeq ($(USE_REPO_LIB_SRC), 1)
# $(MAKE) LIB_PATH=$(DPK_PYTHON_LIB_DIR) LIB_NAME=data-processing-lib-python .defaults.copy-lib
# $(MAKE) LIB_PATH=$(DPK_RAY_LIB_DIR) LIB_NAME=data-processing-lib-ray .defaults.copy-lib
#endif
# if [ -e ../python ]; then \
# $(MAKE) LIB_PATH=../python LIB_NAME=python-transform .defaults.copy-lib; \
# fi
# $(MAKE) DOCKER_IMAGE=$(DOCKER_LOCAL_IMAGE) .defaults.image
# -rm -rf data-processing-lib-python
# -rm -rf data-processing-lib-ray
# -rm -rf python-transform
# Build an image using the local Dockerfile and make the data-processing wheel
# available in the current directory for use by the Dockerfile (i.e. to install the library).
# Note that this looks for the ../python directory, which is currently only used in the transform projects,
# but we add it here as a convenience to avoid duplicating a lot of this in transforms/.make.transforms.
.PHONY: .defaults.ray-lib-whl-image
.defaults.ray-lib-whl-image:: .default.build-lib-wheel
# Must be called with a DOCKER_LOCAL_IMAGE= setting.
@# Help: Build the Ray $(DOCKER_LOCAL_IMAGE) using the $(DOCKER_FILE) and library wheel
@$(eval LIB_WHEEL_FILE := $(shell find data-processing-dist/*.whl))
$(eval LIB_WHEEL_FILE := $(shell basename $(LIB_WHEEL_FILE)))
if [ -e ../python ]; then \
$(MAKE) LIB_PATH=../python LIB_NAME=python-transform .defaults.copy-lib; \
fi
$(MAKE) DOCKER_IMAGE=$(DOCKER_LOCAL_IMAGE) DPK_WHEEL_FILE_NAME=$(LIB_WHEEL_FILE) .defaults.image
-rm -rf python-transform
-rm -rf data-processing-dist
# Build the base spark image used by spark-based transforms
.PHONY: .defaults.spark-lib-base-image
.defaults.spark-lib-base-image:
$(MAKE) -C $(DPK_SPARK_LIB_DIR) image
# Note that this looks for the ../python directory, which is currently only used in the transform projects,
# but we add it here as a convenience to avoid duplicating a lot of this in transforms/.make.transforms.
# Must be called with a DOCKER_LOCAL_IMAGE= setting.
#.PHONY: .defaults.spark-lib-src-image
#.defaults.spark-lib-src-image:: .defaults.spark-lib-base-image
# @# Help: Build the Spark $(DOCKER_LOCAL_IMAGE) using the $(DOCKER_FILE), requirements.txt and data-processing-libs source
# $(MAKE) IMAGE_NAME_TO_VERIFY=$(DOCKER_SPARK_BASE_IMAGE_NAME) .defaults.verify-image-availability
#ifeq ($(USE_REPO_LIB_SRC), 1)
# $(MAKE) LIB_PATH=$(DPK_PYTHON_LIB_DIR) LIB_NAME=data-processing-lib-python .defaults.copy-lib
# $(MAKE) LIB_PATH=$(DPK_SPARK_LIB_DIR) LIB_NAME=data-processing-lib-spark .defaults.copy-lib
#endif
# if [ -e ../python ]; then \
# $(MAKE) LIB_PATH=../python LIB_NAME=python-transform .defaults.copy-lib; \
# fi
# $(MAKE) DOCKER_IMAGE=$(DOCKER_LOCAL_IMAGE) BASE_IMAGE=$(DOCKER_SPARK_BASE_IMAGE) .defaults.image
# -rm -rf data-processing-lib-python
# -rm -rf data-processing-lib-spark
# -rm -rf python-transform
.PHONY: .defaults.spark-lib-whl-image
.defaults.spark-lib-whl-image:: .default.build-lib-wheel .defaults.spark-lib-base-image
# Must be called with a DOCKER_LOCAL_IMAGE= setting.
@# Help: Build the Spark $(DOCKER_LOCAL_IMAGE) using the $(DOCKER_FILE) and library wheel
$(MAKE) IMAGE_NAME_TO_VERIFY=$(DOCKER_SPARK_BASE_IMAGE_NAME) .defaults.verify-image-availability
@$(eval LIB_WHEEL_FILE := $(shell find data-processing-dist/*.whl))
$(eval LIB_WHEEL_FILE := $(shell basename $(LIB_WHEEL_FILE)))
if [ -e ../python ]; then \
$(MAKE) LIB_PATH=../python LIB_NAME=python-transform .defaults.copy-lib; \
fi
$(MAKE) DOCKER_IMAGE=$(DOCKER_LOCAL_IMAGE) BASE_IMAGE=$(DOCKER_SPARK_BASE_IMAGE) DPK_WHEEL_FILE_NAME=$(LIB_WHEEL_FILE) .defaults.image
-rm -rf python-transform
-rm -rf data-processing-dist
# Install the source from the given directory into an existing venv
# Expects PYTHON_PROJECT_BUILD_DIR and uses EXTRA_INDEX_URL if set.
# PYTHON_PROJECT_BUILD_DIR is expected to contain src and pyproject.toml
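# For example (as used by the venv targets below):
#   $(MAKE) PYTHON_PROJECT_BUILD_DIR=../python .defaults.install-src-venv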
.PHONY: .defaults.install-src-venv
.defaults.install-src-venv::
@echo Begin installing source from $(PYTHON_PROJECT_BUILD_DIR) into venv
$(call check_defined, PYTHON_PROJECT_BUILD_DIR)
@source venv/bin/activate; \
if [ ! -z "$(EXTRA_INDEX_URL)" ]; then \
extra_url='--extra-index-url $(EXTRA_INDEX_URL)'; \
fi; \
if [ -e $(PYTHON_PROJECT_BUILD_DIR)/requirements.txt ]; then \
pip install -r $(PYTHON_PROJECT_BUILD_DIR)/requirements.txt; \
fi; \
if [ -e $(PYTHON_PROJECT_BUILD_DIR)/pyproject.toml ]; then \
if [ -z "$(PROJECT_BUILD_EXTRA)" ]; then \
pip install $(PIP_INSTALL_EXTRA_ARGS) $${extra_url} -e $(PYTHON_PROJECT_BUILD_DIR); \
else \
pip install $(PIP_INSTALL_EXTRA_ARGS) $${extra_url} -e $(PYTHON_PROJECT_BUILD_DIR)[$(PROJECT_BUILD_EXTRA)]; \
fi;\
fi
@echo Done installing source from $(PYTHON_PROJECT_BUILD_DIR) into venv
# Install local requirements last as it generally includes our lib source
.PHONY: .defaults.python-lib-src-venv
.defaults.python-lib-src-venv:: .defaults.create-venv .defaults.install-python-lib-src-venv .defaults.install-local-requirements-venv
# Provided to work around issue #274 in which make seems to run the command despite the -n make argument.
# This resulted in corrupting a pre-existing venv.
# Expects PIP_TARGET
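# For example (as used by the venv targets below): $(MAKE) PIP_TARGET=data-prep-toolkit .defaults.pip-uninstall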
.PHONY: .defaults.pip-uninstall
.defaults.pip-uninstall:
$(call check_defined, PIP_TARGET)
pip uninstall -y $(PIP_TARGET)
# Provided for similar reasons as the .defaults.pip-uninstall target.
# BUT NO, this seems to cause pytest to be installed adjacent to the venv directory, so it is not used yet.
# Expects PIP_TARGET
.PHONY: .defaults.pip-install-NOT-USED-YET
.defaults.pip-install:
$(call check_defined, PIP_TARGET)
pip install $(PIP_TARGET)
# Install all source from the repo for a python runtime transform into an existing venv
.PHONY: .defaults.install-python-lib-src-venv
.defaults.install-python-lib-src-venv::
ifeq ($(USE_REPO_LIB_SRC), 1)
@# Help: Install Python data processing library source into existing venv
@echo Installing Python data processing library source to existing venv
@source venv/bin/activate; \
$(MAKE) PIP_TARGET=data-prep-toolkit .defaults.pip-uninstall; \
$(MAKE) PYTHON_PROJECT_BUILD_DIR=$(DPK_PYTHON_BUILD_DIR) .defaults.install-src-venv; \
echo Installed source from Python processing library for `which $(PYTHON)`
else
@# Help: DO NOT install Python data processing library source into existing venv
@echo USE_REPO_LIB_SRC!=1 so NOT installing Python data processing library source into existing venv
endif
# Install local requirements last as it generally includes our lib source
.PHONY: .defaults.ray-lib-src-venv
.defaults.ray-lib-src-venv:: .defaults.create-venv .defaults.install-ray-lib-src-venv .defaults.install-local-requirements-venv
@# Help: Create the venv and install Ray library source, local dependencies and adjacent python source if present.
# Create the venv for kfp use; note this does not install local requirements.
.PHONY: .defaults.kfp-venv
.defaults.kfp-venv:: .defaults.create-venv .defaults.install-ray-lib-src-venv
@# Help: Create the venv and install Ray library source and adjacent python source if present.
# Install all source from the repo for a ray runtime transform into an existing venv
# And if there is an adjacent python dir (as for transforms), then also install that source
.PHONY: .defaults.install-ray-lib-src-venv
.defaults.install-ray-lib-src-venv::
ifeq ($(USE_REPO_LIB_SRC), 1)
@# Help: Install Ray and Python data processing library source into existing venv
@echo Installing Ray and Python data processing library source to existing venv
@source venv/bin/activate; \
$(MAKE) PIP_TARGET=data-prep-toolkit[ray] .defaults.pip-uninstall; \
$(MAKE) PIP_TARGET=data-prep-toolkit .defaults.pip-uninstall; \
$(MAKE) PYTHON_PROJECT_BUILD_DIR=$(DPK_PYTHON_BUILD_DIR) .defaults.install-src-venv; \
$(MAKE) PYTHON_PROJECT_BUILD_DIR=$(DPK_RAY_BUILD_DIR) PROJECT_BUILD_EXTRA=ray .defaults.install-src-venv; \
echo Installed source from Python and Ray data processing libraries for `which $(PYTHON)`
else
@# Help: DO NOT install Python or Ray data processing library source into existing venv
@echo USE_REPO_LIB_SRC!=1 so NOT installing Python or Ray data processing library source into existing venv
endif
# Install the transform's adjacent python library if it has one
@if [ -d ../python ]; then \
source venv/bin/activate; \
$(MAKE) PYTHON_PROJECT_BUILD_DIR=../python .defaults.install-src-venv; \
fi
# Install local requirements last as it generally includes our lib source
.PHONY: .defaults.spark-lib-src-venv
.defaults.spark-lib-src-venv:: .defaults.create-venv .defaults.install-spark-lib-src-venv .defaults.install-local-requirements-venv
@# Help: Create the venv and install Spark library source and local dependencies.
# Install the python-based lib BEFORE spark, assuming spark depends on the same version as the python source.
.PHONY: .defaults.install-spark-lib-src-venv
.defaults.install-spark-lib-src-venv::
ifeq ($(USE_REPO_LIB_SRC), 1)
@# Help: Install Spark and Python data processing library source into existing venv
@echo Installing Spark and Python data processing library source to existing venv
@source venv/bin/activate; \
$(MAKE) PIP_TARGET=data-prep-toolkit[spark] .defaults.pip-uninstall; \
$(MAKE) PIP_TARGET=data-prep-toolkit .defaults.pip-uninstall; \
$(MAKE) PYTHON_PROJECT_BUILD_DIR=$(DPK_PYTHON_BUILD_DIR) .defaults.install-src-venv; \
$(MAKE) PYTHON_PROJECT_BUILD_DIR=$(DPK_SPARK_BUILD_DIR) PROJECT_BUILD_EXTRA=spark .defaults.install-src-venv; \
echo Installed source from Python and Spark processing libraries for `which $(PYTHON)`
else
@# Help: DO NOT install Python or Spark data processing library source into existing venv
@echo USE_REPO_LIB_SRC!=1 so NOT installing Python or Spark data processing library source into existing venv
endif
if [ -d ../python ]; then \
source venv/bin/activate; \
$(MAKE) PYTHON_PROJECT_BUILD_DIR=../python .defaults.install-src-venv; \
fi
# Run tests in test directory from that dir after adding ../src to PYTHONPATH
# Assumes a Makefile target of venv to create the venv
.PHONY: .defaults.test-src
.defaults.test-src:: venv
@# Help: Run pytest on the test directory inside the venv
source venv/bin/activate; \
export PYTHONPATH=../src:../: ; \
cd test; $(PYTEST) .
# This is a small convenience; the image itself must already be created.
.PHONY: .defaults.test-image-pytest
.defaults.test-image-pytest::
# Put this 2nd so its help shows up instead of .defaults.image help
@# Help: Test $(DOCKER_LOCAL_IMAGE) using test source inside the image.
$(DOCKER) run -t --rm $(DOCKER_LOCAL_IMAGE) pytest -s test
.PHONY: .defaults.test-locals
.defaults.test-locals::
@# Help: Run the *local*.py files in the src directory
@source venv/bin/activate; \
export PYTHONPATH=src; \
files=$$(find src -name '*local*.py' ); \
for i in $$files; do \
echo Executing: python $$i; \
python $$i; \
if [ $$? -ne 0 ]; then \
exit 1; \
fi; \
done
# Expects RUN_FILE and RUN_ARGS
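# Illustrative invocation (file name and args are hypothetical):
#   make RUN_FILE=my_transform_local.py RUN_ARGS="--help" .defaults.run-src-file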
.PHONY: .defaults.run-src-file
.defaults.run-src-file:
@# Help: Run $(RUN_FILE) (if it exists).
$(call check_defined, RUN_FILE)
$(call check_defined, RUN_ARGS)
@if [ ! -e "src/$(RUN_FILE)" ]; then \
echo ""; \
echo src/$(RUN_FILE) does not exist.; \
echo ""; \
exit 1; \
fi
@if [ ! -d "venv" ]; then \
echo ""; \
echo "Please 'make venv' before running $(RUN_FILE)."; \
echo ""; \
exit 1; \
fi
source venv/bin/activate; \
cd src; \
python $(RUN_FILE) $(RUN_ARGS)
# This expects the image to already be built and so does not depend on .defaults.image.
# This allows others to define their own image building prior to publishing.
.PHONY: .defaults.publish-image
.defaults.publish-image::
@# Help: Publish the $(DOCKER_LOCAL_IMAGE) to $(DOCKER_HOSTNAME) container registry
$(call check_defined, DOCKER_IMAGE_NAME)
-$(DOCKER) logout $(DOCKER_HOSTNAME)
$(DOCKER) login $(DOCKER_HOSTNAME) -u '$(DOCKER_REGISTRY_USER)' -p '$(DOCKER_REGISTRY_KEY)'
$(DOCKER) push $(DOCKER_REMOTE_IMAGE)
# Create the local virtual environment, assuming python is already installed and available
# We upgrade pip as that seems to be required by watson_nlp
# We install wheel, because it seems to be required for fasttext install on redhat.
# We use "pip" instead of "$(PIP)" below because otherwise if the user has overriddent PYTHON
# they will end up installing into that PYTHON and NOT the venv.
.PHONY: .defaults.venv
.defaults.venv: .defaults.create-venv .defaults.install-local-requirements-venv
.PHONY: .defaults.create-venv
.defaults.create-venv: .check_python_version
@# Help: Create the virtual environment using requirements.txt or pyproject.toml
$(PYTHON) -m venv venv
@source venv/bin/activate; \
pip install --upgrade pip; \
pip install wheel pytest pytest-cov;
# Install requirements defined in the current directory into an existing venv
.PHONY: .defaults.install-local-requirements-venv
.defaults.install-local-requirements-venv:
@source venv/bin/activate; \
if [ ! -z "$(EXTRA_INDEX_URL)" ]; then \
extra_url='--extra-index-url $(EXTRA_INDEX_URL)'; \
fi; \
if [ -e requirements.txt ]; then \
echo Installing requirements from requirements.txt; \
pip install $(PIP_INSTALL_EXTRA_ARGS) $$extra_url -r requirements.txt; \
fi; \
if [ -e pyproject.toml ]; then \
echo Installing from pyproject.toml; \
pip install $(PIP_INSTALL_EXTRA_ARGS) $$extra_url -e .; \
fi
.PHONY: .defaults.check.installed
.defaults.check.installed::
@if ! command -v $(CHECK_RUNNABLE) > /dev/null 2>&1; then \
echo $(CHECK_RUNNABLE) must be installed; \
exit 1; \
fi
.PHONY: .defaults.minio.check
.defaults.minio.check::
@# Help: Check that minio and the mc CLI are installed.
$(MAKE) CHECK_RUNNABLE=minio .defaults.check.installed
$(MAKE) CHECK_RUNNABLE=mc .defaults.check.installed
MINIO_DIR=/tmp/data-prep-kit
MINIO_ALIAS=local
# These are the credentials used by samples.
MINIO_ADMIN_USER=localminioaccesskey
MINIO_ADMIN_PWD=localminiosecretkey
.PHONY: .defaults.minio.verify-running
.defaults.minio.verify-running:
@z=$$(ps -elf | grep minio | grep -v grep); \
if [ -z "$$z" ]; then \
echo ""; \
echo "Please start minio (see make help)"; \
echo ""; \
exit 1; \
fi
.PHONY: .defaults.minio.stop
.defaults.minio.stop::
$(MAKE) .defaults.minio.check
@# Help: Stop the minio server used for S3-based samples
-mc admin service stop $(MINIO_ALIAS) > /dev/null 2>&1
-mc alias rm $(MINIO_ALIAS) > /dev/null 2>&1
.PHONY: .defaults.minio.start
.defaults.minio.start::
$(MAKE) .defaults.minio.stop
@# Help: Stop, then start, the minio server used for S3-based samples
rm -rf $(MINIO_DIR)
mkdir $(MINIO_DIR)
echo Starting minio server
minio server $(MINIO_DIR) &
sleep 1
-mc alias rm $(MINIO_ALIAS) > /dev/null 2>&1
mc alias set $(MINIO_ALIAS) http://127.0.0.1:9000 minioadmin minioadmin
mc admin user svcacct add --access-key "$(MINIO_ADMIN_USER)" --secret-key "$(MINIO_ADMIN_PWD)" $(MINIO_ALIAS) minioadmin
mc alias ls $(MINIO_ALIAS)
@echo "Test minio server started"
.PHONY: .defaults.minio.load-test-data
.defaults.minio.load-test-data::
@# Help: Load the minio server with S3 sample data from $(MINIO_SRC) to $(MINIO_DEST)
@if [ -z "$(MINIO_SRC)" -o -z "$(MINIO_DEST)" ]; then \
echo MINIO_SRC and MINIO_DEST must be defined when using the $@ target; \
exit 1; \
fi
$(MAKE) .defaults.minio.check
@# Help: Load $(MINIO_SRC) into minio at local/test/$(MINIO_DEST)
-mc mb local/test > /dev/null 2>&1 # Ignore if it already exists
mc cp --recursive $(MINIO_SRC) local/test/$(MINIO_DEST)
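# Illustrative invocation (paths are hypothetical):
#   make MINIO_SRC=test-data/input MINIO_DEST=my-transform/input .defaults.minio.load-test-data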
# Changes the version field of the pyproject.toml file to the given version
# and updates the referenced library versions as defined in .make.versions.
# Expects TOML_VERSION
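# For example (version is illustrative): make TOML_VERSION=1.2.3 .defaults.update-toml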
.PHONY: .defaults.update-toml
.defaults.update-toml:
$(call check_defined, TOML_VERSION)
if [ -e pyproject.toml ]; then \
$(MAKE) TOML_VERSION=$(TOML_VERSION) .defaults.__set-toml-version; \
$(MAKE) .defaults.__update-toml-lib-dep-versions; \
$(MAKE) .defaults.__update-toml-python-versions; \
fi
# Changes the version field of the pyproject.toml file to the given version
# Expects TOML_VERSION
.PHONY: .defaults.__set-toml-version
.defaults.__set-toml-version:
@# Help: Set the version= field of pyproject.toml
if [ -e pyproject.toml ]; then \
cat pyproject.toml | sed -e \
's/^version[ ]*=.*/version = "'${TOML_VERSION}'"/' \
> tt.toml; \
mv tt.toml pyproject.toml; \
fi
# Updates the Python supported versions field of the pyproject.toml file to the given versions
# Expects REQUIRED_PYTHON_VERSIONS
.PHONY: .defaults.__update-toml-python-versions
.defaults.__update-toml-python-versions:
@# Help: Set the requires-python field of pyproject.toml
if [ -e pyproject.toml ]; then \
cat pyproject.toml | sed -e \
's/^requires-python[ ]*=.*"/requires-python = "'${REQUIRED_PYTHON_VERSIONS}'"/' \
> tt.toml; \
mv tt.toml pyproject.toml; \
fi
# Updates the version references to our repo libraries as defined in .make.versions
.PHONY: .defaults.__update-toml-lib-dep-versions
.defaults.__update-toml-lib-dep-versions:
ifeq ($(USE_REPO_LIB_SRC), 1)
@# Help: Update pyproject.toml to depend on lib versions defined in .make.versions
if [ -e pyproject.toml ]; then \
cat pyproject.toml | sed \
-e 's/"data-prep-toolkit\[ray\]\([=><~][=]\).*"/"data-prep-toolkit[ray]\1$(DPK_LIB_VERSION)"/' \
-e 's/"data-prep-toolkit-ray\([=><~][=]\).*"/"data-prep-toolkit-ray\1$(DPK_LIB_VERSION)"/' \
-e 's/"data-prep-toolkit-spark\([=><~][=]\).*"/"data-prep-toolkit-spark\1$(DPK_LIB_VERSION)"/' \
-e 's/"data-prep-toolkit-kfp\([=><~][=]\).*"/"data-prep-toolkit-kfp\1$(DPK_LIB_KFP_VERSION)"/' \
-e 's/"data-prep-toolkit\([=><~][=]\).*"/"data-prep-toolkit\1$(DPK_LIB_VERSION)"/' \
-e 's/"ray\[default\]\([=><~][=]\).*"/"ray\[default\]\1$(RAY)"/' \
-e 's/"data-prep-toolkit-kfp-shared\(..\).*"/"data-prep-toolkit-kfp-shared\1$(DPK_LIB_KFP_VERSION)"/' \
> tt.toml; \
mv tt.toml pyproject.toml; \
fi
if [ -e requirements.txt ]; then \
cat requirements.txt | sed \
-e 's/data-prep-toolkit\[ray\]\([=><~][=]\).*/data-prep-toolkit[ray]\1$(DPK_LIB_VERSION)/' \
-e 's/data-prep-toolkit-ray\([=><~][=]\).*/data-prep-toolkit-ray\1$(DPK_LIB_VERSION)/' \
-e 's/data-prep-toolkit-transforms\([=><~][=]\).*/data-prep-toolkit-transforms\1$(DPK_TRANSFORMS_VERSION)/' \
-e 's/data-prep-toolkit-spark\([=><~][=]\).*/data-prep-toolkit-spark\1$(DPK_LIB_VERSION)/' \
-e 's/data-prep-toolkit-kfp\([=><~][=]\).*/data-prep-toolkit-kfp\1$(DPK_LIB_KFP_VERSION)/' \
-e 's/data-prep-toolkit\([=><~][=]\).*/data-prep-toolkit\1$(DPK_LIB_VERSION)/' \
-e 's/ray\[default\]\([=><~][=]\).*/ray\[default\]\1$(RAY)/' \
-e 's/data-prep-toolkit-kfp-shared\(..\).*/data-prep-toolkit-kfp-shared\1$(DPK_LIB_KFP_VERSION)/' \
> tt.txt; \
mv tt.txt requirements.txt; \
fi
endif
# Build the distribution, usually in preparation for publishing with the .defaults.publish-dist target
.PHONY: .defaults.build-dist
.defaults.build-dist :
@# Help: Build the distribution for publishing to pypi
@if [ ! -e pyproject.toml ]; then \
echo ERROR: Building a distribution requires a local pyproject.toml file; \
exit 1; \
fi
rm -rf dist || true
rm -rf src/*egg-info || true
${PIP} install --upgrade build
${PYTHON} -m build $(BUILD_WHEEL_EXTRA_ARG)
# Publish the distribution in the dist directory, usually created with .defaults.build-dist target
.PHONY: .defaults.publish-dist
.defaults.publish-dist :
@# Help: Publish existing project distribution to pypi
@if [ ! -e dist ]; then \
echo ERROR: Publishing a distribution requires a local dist directory. Did you build?; \
exit 1; \
fi
${PYTHON} -m twine check dist/*
${PYTHON} -m twine upload --verbose --non-interactive dist/*