diff --git a/.gitignore b/.gitignore index 13b786d3..3527f668 100644 --- a/.gitignore +++ b/.gitignore @@ -49,5 +49,6 @@ plugins/rollbar/ src/plugins/**/build/ -# DB imports/exports +# import and export files for syncing between environments +sync sql diff --git a/README.md b/README.md index 31ee2003..ddd12be3 100644 --- a/README.md +++ b/README.md @@ -108,33 +108,33 @@ This theme uses the following files for linting: The preferred mechanism for syncing your environment with others is to use database imports and exports. This repo has a few scripts to make this process as easy as possible. While your containers are running, you can run any of these commands to import, export, or backup a database. Here are the quick commands, with more instructions below. ```sh -# import a DB from the `sql` folder -npm run import-db +# import a DB from the `sync/sql` folder +npm run import:db # export your DB -npm run export-db +npm run export:db # export your DB with a custom name -npm run export-db validation-data +npm run export:db validation-data # backup your DB in case you need to restore it later -npm run backup-db +npm run backup:db # backup your DB with a custom name -npm run backup-db work-in-progress +npm run backup:db work-in-progress ``` #### Importing Databases -You can import databases from production, a saved backup, or another developer's DB export with the `import-db` script. To use it, put a `*.sql.gz` file in a top-level `sql` folder in the repo and run `npm run import-db`. This will first back up your existing database in case you need to revert back to it, and then it will import the database from the given file, effectively replacing your database with a new one. +You can import databases from production, a saved backup, or another developer's DB export with the `import:db` script. To use it, put a `*.sql.gz` file in a top-level `sync/sql` folder in the repo and run `npm run import:db`. 
This will first back up your existing database in case you need to revert back to it, and then it will import the database from the given file, effectively replacing your database with a new one. #### Exporting Databases -You can export your database for another developer to import or to import to a staging environment by running `npm run export-db`. By default, this will create a timestamped and gzipped file in `sql/exports`, but you can specify a name by running `npm run export-db `. The exported file will still be timestamped, but it will use the name you give it instead of the default prefix. +You can export your database for another developer to import or to import to a staging environment by running `npm run export:db`. By default, this will create a timestamped and gzipped file in `sync/sql/exports`, but you can specify a name by running `npm run export:db <name>`. The exported file will still be timestamped, but it will use the name you give it instead of the default prefix. #### Backing Up Databases -This will happen automatically when you import a database, but if you want to manually backup your database, you can run `npm run backup-db`. This functions nearly identically to the `export-db` script, except for using a different prefix and putting the file in `sql/backups`. As with `export-db`, you can specify a name for your DB backup if you want. +This will happen automatically when you import a database, but if you want to manually back up your database, you can run `npm run backup:db`. This functions nearly identically to the `export:db` script, except for using a different prefix and putting the file in `sync/sql/backups`. As with `export:db`, you can specify a name for your DB backup if you want. 
### Atom diff --git a/package.json b/package.json index 1f629ada..f1047af3 100644 --- a/package.json +++ b/package.json @@ -31,10 +31,17 @@ "lint:twig": "./scripts/run.sh vendor/bin/twigcs src/php/views", "lint": "run-s lint:*", "php:run": "./scripts/run.sh", - "export-db": "./scripts/export-db.sh", - "backup-db": "BACKUP=true ./scripts/export-db.sh", - "import-db": "./scripts/import-db.sh", - "preimport-db": "npm run backup-db", + "export": "run-s \"export:* {1}\" --", + "backup": "run-s \"backup:* {1}\" --", + "import": "run-s import:*", + "export:db": "./scripts/export-db.sh", + "backup:db": "BACKUP=true ./scripts/export-db.sh", + "import:db": "./scripts/import-db.sh", + "export:uploads": "./scripts/export-uploads.sh", + "backup:uploads": "BACKUP=true ./scripts/export-uploads.sh", + "import:uploads": "./scripts/import-uploads.sh", + "preimport:db": "npm run backup:db", + "preimport:uploads": "npm run backup:uploads", "generate:custom-block": "node ./generators/custom-block.js", "generate:custom-blocks-plugin": "node ./generators/custom-blocks-plugin.js", "generate:page-template": "node ./generators/page-template.js", diff --git a/scripts/.gitkeep b/scripts/.gitkeep deleted file mode 100644 index e69de29b..00000000 diff --git a/scripts/export-db.sh b/scripts/export-db.sh index bc5f447f..7e803b8e 100755 --- a/scripts/export-db.sh +++ b/scripts/export-db.sh @@ -3,12 +3,12 @@ export $(grep -v '^#' .env | xargs) timestamp=$(date -u +%Y-%m-%dT%H-%M-%S_%Z) -path='sql/exports' +path='sync/sql/exports' prefix='db-export' if [ $BACKUP ] then - path='sql/backups' + path='sync/sql/backups' prefix='db-backup' fi diff --git a/scripts/export-uploads.sh b/scripts/export-uploads.sh new file mode 100755 index 00000000..0d06ea9e --- /dev/null +++ b/scripts/export-uploads.sh @@ -0,0 +1,29 @@ +#!/bin/bash + +export $(grep -v '^#' .env | xargs) + +timestamp=$(date -u +%Y-%m-%dT%H-%M-%S_%Z) +path='sync/uploads/exports' +prefix='uploads-export' + +if [ $BACKUP ] +then + 
path='sync/uploads/backups' + prefix='uploads-backup' +fi + +dirname=$prefix-$timestamp +filename=../$prefix-$timestamp.tar.gz + +if [ $1 ] +then + dirname=$1-$timestamp + filename=../$1-$timestamp.tar.gz +fi + +mkdir -p $path/$dirname +cp -rv uploads/ $path/$dirname +cd $path/$dirname +tar -czf $filename . +cd .. +rm -rf $dirname diff --git a/scripts/import-db.sh b/scripts/import-db.sh index e009ff1a..865edce4 100755 --- a/scripts/import-db.sh +++ b/scripts/import-db.sh @@ -2,26 +2,27 @@ export $(grep -v '^#' .env | xargs) -# copy the most recent .sql.gz file in the sql folder for import -cp "$(ls -t sql/*.sql.gz | head -1)" sql/db-import-raw.sql.gz +# copy the most recent .sql.gz file in the sync/sql folder for import +cp "$(ls -t sync/sql/*.sql.gz | head -1)" sync/sql/db-import-raw.sql.gz if [ $? -ne 0 ] then - echo "There must be at least one .sql.gz file in the sql folder to import" - exit 1 + echo "There must be at least one .sql.gz file in the sync/sql folder to import" + echo "Skipping database import" + exit 0 fi -gunzip sql/db-import-raw.sql.gz +gunzip sync/sql/db-import-raw.sql.gz # replace environment-specific URLs with localhost URL -sed "s/$SITE_URL/http:\/\/localhost:8000/g" sql/db-import-raw.sql > sql/db-import.sql +sed "s/$SITE_URL/http:\/\/localhost:8000/g" sync/sql/db-import-raw.sql > sync/sql/db-import.sql # drop existing database, create a new one, and load it up with data docker exec -i sparkpress_db mysql --user=$MYSQL_USER --password=$MYSQL_PASSWORD -e "drop database if exists $MYSQL_DATABASE" docker exec -i sparkpress_db mysql --user=$MYSQL_USER --password=$MYSQL_PASSWORD -e "create database $MYSQL_DATABASE" -docker exec -i sparkpress_db mysql --user=$MYSQL_USER --password=$MYSQL_PASSWORD $MYSQL_DATABASE < sql/db-import.sql +docker exec -i sparkpress_db mysql --user=$MYSQL_USER --password=$MYSQL_PASSWORD $MYSQL_DATABASE < sync/sql/db-import.sql # clean up files that aren't useful after import -rm sql/db-import* +rm sync/sql/db-import* 
-mkdir -p sql/previous-imports -mv sql/*.sql.gz sql/previous-imports +mkdir -p sync/sql/previous-imports +mv sync/sql/*.sql.gz sync/sql/previous-imports diff --git a/scripts/import-uploads.sh b/scripts/import-uploads.sh new file mode 100755 index 00000000..f37303c5 --- /dev/null +++ b/scripts/import-uploads.sh @@ -0,0 +1,20 @@ +#!/bin/bash + +export $(grep -v '^#' .env | xargs) + +# copy the most recent .tar.gz file in the sync/uploads folder for import +cp "$(ls -t sync/uploads/*.tar.gz | head -1)" sync/uploads/uploads-import.tar.gz +if [ $? -ne 0 ] +then + echo "There must be at least one .tar.gz file in the sync/uploads folder to import" + echo "Skipping uploads import" + exit 0 +fi + +rm -rf uploads/* +mkdir -p uploads +tar -zxvf sync/uploads/uploads-import.tar.gz -C uploads +rm sync/uploads/uploads-import.tar.gz + +mkdir -p sync/uploads/previous-imports +mv sync/uploads/*.tar.gz sync/uploads/previous-imports