From 1a1c129b91aacc2535e6dce703e2e014bb917d22 Mon Sep 17 00:00:00 2001
From: Tim Sutton
Date: Sat, 16 Dec 2023 10:44:17 +0200
Subject: [PATCH] Strip more stuff out for eng only build

---
 .github/workflows/nix-build.yml       |   2 -
 README.md                             |  92 +----------
 default.nix                           |   2 -
 english_build.sh                      |  91 +++++++++++
 scripts/.htaccess                     |   6 -
 scripts/compress-images.sh            |   6 -
 scripts/create_transifex_resources.sh |  77 ---------
 scripts/generate_keywords.py          | 213 -------------------------
 scripts/post_translate.sh             | 172 --------------------
 scripts/pre_translate.sh              |  99 ------------
 scripts/rst_generation_tools.py       | 217 --------------------------
 scripts/update_all_dev.sh             |   8 -
 12 files changed, 95 insertions(+), 890 deletions(-)
 create mode 100755 english_build.sh
 delete mode 100644 scripts/.htaccess
 delete mode 100755 scripts/compress-images.sh
 delete mode 100755 scripts/create_transifex_resources.sh
 delete mode 100644 scripts/generate_keywords.py
 delete mode 100755 scripts/post_translate.sh
 delete mode 100755 scripts/pre_translate.sh
 delete mode 100644 scripts/rst_generation_tools.py
 delete mode 100755 scripts/update_all_dev.sh

diff --git a/.github/workflows/nix-build.yml b/.github/workflows/nix-build.yml
index c19c35530..1865e52ae 100644
--- a/.github/workflows/nix-build.yml
+++ b/.github/workflows/nix-build.yml
@@ -11,6 +11,4 @@ jobs:
         with:
           nix_path: nixpkgs=channel:nixos-unstable
       - uses: DeterminateSystems/magic-nix-cache-action@v2
-      - run: nix-shell --run "scripts/pre_translate.sh"
-      - run: nix-shell --run "scripts/post_translate.sh"
       - run: nix-shell --run "scripts/english_build.sh"
diff --git a/README.md b/README.md
index 9c827e3a0..f9e826b25 100644
--- a/README.md
+++ b/README.md
@@ -88,112 +88,28 @@ pip freeze
 
 You should see a list of packages similar to those listed below:
 
 ```
-pystac==1.9.0
-python-dateutil==2.8.2
 six==1.16.0
 ```
 
 ## Building the docs
 
-We are now going to use that inasafe-doc directory as source and output directory for the 
+We are now going to use that inasafe-doc directory as source and output directory for the
 HTML
 
 ```
-scripts/pre_translate.sh
-scripts/post_translate.sh
 scripts/english_build.sh
 ```
 
-You can also build the docs for a single language:
-
-```
-scripts/post_translate.sh id html
-```
-
 ## Viewing the docs
 
 After building the docs, you can run a lightweight web server to view the generated web pages:
 
 ```
-httplz docs/output/html/
-```
-
-
-Translating the english InaSAFE Documentation
-=============================================
-
-Every language has it's own maintainer, please contact them,
-if you want to help. You find a list of current language maintainers at the
-end of this document. If your language is not listed, join our community by
-sending a mail to and ask for
-help.
-
-HowTo for language maintainers
-------------------------------
-
-* get an account on github.com
-* install required tools on your computer
-* login to github and create a fork of the inasafe-doc repository that other
-  translators can work with.
-
-Translators now can create their own fork from the forked repository of the
-maintainer, commit their translations to their own forked repository and send
-pull request to the language maintainer's repository. Once the maintainer
-receives a pull request, he should check the changes, accept the pull request
-and merge the changes with the official inasafe-doc repository.
-
-Workflow for adding a new language
-----------------------------------
-
-* add your locale code in the pre_translate.sh script in the line with 'LOCALE='
-* run 'scripts/pre_translate.sh'. There will be a new directory in the i18n
-  directory for your language, containing the po-files for all source files
-* create an empty(!) directory in the resources directory for your language. The
-  idea is to ONLY put images in exact the same directory structure if you
-  want an image to be 'translated'. As default the english one will be used
-  from the 'en' directory, and only if there is an translated one it will be
-  found and used.
-* add your locale code in the post_translate.sh script in the line with
-  'LOCALE='
-
-HowTo for translators
----------------------
-
-* get an account on github.com
-* install required tools on your computer
-* login to github and create a fork of the inasafe-doc repository from your
-  language maintainer.
-* git clone your forked inasafe-doc repository to your computer
-* run './scripts/pre_translate.sh <language>' locally to build the
-  translation files
-* translate the .po files locally and use an offline editor.
-  [QtLinguist](https://code.google.com/p/qtlinguistdownload/)
-  being the highly recommended choice.
-* with the english documentation from ./docs/source/ run '
-  ./scripts/post_translate.sh languagecode' locally again to check your
-  translation
-* files translated need to be "synchronized" with the ones in the directory of
-  the forked repo. Commit your changes to your private forked repository and
-  create a pull request on github. It means that you send a request to the
-  owners of the repository you forked (language maintainer) asking him to
-  accept your translations and move them to the "original repository". For
-  doing that go on github.com, browse on the directory of your repository and
-  click pull request (https://help.github.com/articles/using-pull-requests).
-* your language maintainer will take care that every significant translation go
-  into the master repository.
-* Generally, as soon as you finish editing one or more .po files, you should
-  commit as soon as possible the edits to the git repository,
-  in order to minimize the possibility of conflicts.
-
-The maintainer and translator should update and check the translations
-regularly. Therefore you should 'git pull' when you start to work and run the
-'scripts/pre_translate.sh <language>' and 'scripts/post_translate.sh
-<language>' script after every significant change in the documentation.
-This will generate and update the .po files needed for translations. If all
-is fine, take care, that the translation go into the repository of your
-language maintainer.
+httplz docs/output/html/en/
+```
+
 
 [InaSAFEImage]: http://inasafe.org/en/_static/img/logo.png
 [website]: http://inasafe.org/
diff --git a/default.nix b/default.nix
index 9dc464d13..10312d9dc 100644
--- a/default.nix
+++ b/default.nix
@@ -27,10 +27,8 @@ in pkgs.mkShell rec {
     pythonPackages.twitter
 
     pinnedPkgs.sphinx
-    pinnedPkgs.transifex-client
     pinnedPkgs.argparse
     pinnedPkgs.rpl
-    pinnedPkgs.gettext
 
     # Simple http server to test the built docs
     pinnedPkgs.httplz
diff --git a/english_build.sh b/english_build.sh
new file mode 100755
index 000000000..d713a88f0
--- /dev/null
+++ b/english_build.sh
@@ -0,0 +1,91 @@
+#!/usr/bin/env bash
+# Based off the script from QGIS by Tim Sutton and Richard Duivenvoorde
+
+# Name of the dir containing static files
+STATIC=_static
+# Path to the documentation root relative to script execution dir
+DOCROOT=docs
+# Path from execution dir of this script to docs sources (could be just
+# '' depending on how your sphinx project is set up).
+SOURCE=source
+
+pushd .
+cd $DOCROOT
+
+SPHINXBUILD=`which sphinx-build`
+TEXI2PDF=`which texi2pdf`
+BUILDDIR=build
+# be sure to remove an old build dir
+rm -rf ${BUILDDIR}
+mkdir -p ${BUILDDIR}
+
+# output dirs
+PDFDIR=`pwd`/output/pdf
+HTMLDIR=`pwd`/output/html
+mkdir -p ${PDFDIR}
+mkdir -p ${HTMLDIR}
+
+VERSION=`cat source/conf.py | grep "version = '.*'" | grep -o "[0-9]\.[0-9]"`
+
+# We need to flush the build dir or the translations don't come through
+rm -rf ${BUILDDIR}
+mkdir ${BUILDDIR}
+
+# cleanup all images for the other locale
+rm -rf source/static
+mkdir -p source/static
+# copy english (base) resources to the static dir
+cp -r resources/en/* source/static
+# this is an english only build, so no localised resources need to be
+# overlaid on top of the english (base) resources copied above
+
+#################################
+#
+# HTML Generation
+#
+#################################
+# Now prepare the index/irchat-[locale] template which is a manually translated,
+# unique per locale page that gets copied to index.html/irchat.html for the doc
+# generation process.
+cp templates/index-en.html templates/index.html
+cp templates/irchat-en.html templates/irchat.html
+
+echo "Building HTML for locale 'en'..."
+LOG=/tmp/sphinx$$.log
+# -n Run in nit-picky mode. Currently, this generates warnings for all missing references.
+# -W Turn warnings into errors. This means that the build stops at the first warning and sphinx-build exits with exit status 1.
+#${SPHINXBUILD} -nW -d ${BUILDDIR}/doctrees -D language=${LOCALE} -b html source ${HTMLDIR}/${LOCALE} > $LOG
+${SPHINXBUILD} -d ${BUILDDIR}/doctrees -D language=en -b html source ${HTMLDIR}/en/ > $LOG
+WARNINGS=`cat $LOG | grep warning`
+ERRORS=`cat $LOG | grep ERROR`
+if [[ $WARNINGS ]]
+then
+  echo "***********************************************"
+  echo "* Sphinx build produces warnings - Please fix *"
+  echo $WARNINGS
+  echo "***********************************************"
+  exit 1
+fi
+if [[ $ERRORS ]]
+then
+  echo "*********************************************"
+  echo "* Sphinx build produces errors - Please fix *"
+  echo $ERRORS
+  echo "*********************************************"
+  exit 1
+fi
+
+# Remove the static html copy again
+rm templates/index.html
+rm templates/irchat.html
+
+# hack to avoid error when using Search in contents.html
+rpl -q '#/../search.html' 'search.html' ./output/html/en/index.html
+# same applies for having the IRC-Chat Navigation Link
+rpl -q '#/../irchat.html' 'irchat.html' ./output/html/en/index.html
+
+
+rm -rf source/static
+rm -rf ${BUILDDIR}
+
+popd
diff --git a/scripts/.htaccess b/scripts/.htaccess
deleted file mode 100644
index b4f2e6bf8..000000000
--- a/scripts/.htaccess
+++ /dev/null
@@ -1,6 +0,0 @@
-Options +FollowSymlinks -MultiViews
-
-RewriteEngine On
-RewriteBase /
-
-RewriteRule ^$ /en [L]
diff --git a/scripts/compress-images.sh b/scripts/compress-images.sh
deleted file mode 100755
index ba92d3564..000000000
--- a/scripts/compress-images.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-for FILE in `find ./docs/resources -type f -size +200k -name "*.png" ! -path "./safe_qgis/test/test_data/test_images/*"`
-do
-  echo "Compressing $FILE"
-  mogrify -dither FloydSteinberg -colors 256 -antialias -strip $FILE
-done
diff --git a/scripts/create_transifex_resources.sh b/scripts/create_transifex_resources.sh
deleted file mode 100755
index 6c3279efe..000000000
--- a/scripts/create_transifex_resources.sh
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/bin/bash
-
-# This script is used to register InaSAFE translatable resources with Transifex
-# http://transifex.com
-#
-# Note that this script updates or creates entries in .tx/config file
-#
-# Tim Sutton, March 2013
-
-#
-# Sphinx documentation first
-#
-
-LOCALES=`ls ../docs/i18n`
-
-# We do not translate developer docs as development is mostly done in english
-# We do not translate api-docs
-
-for ITEM in user-docs training
-do
-  for POFILE in `find ../docs/i18n/en/LC_MESSAGES/${ITEM}/ -type f -name '*.po'`
-  do
-    # get the po file replacing 'en' with ''
-    GENERICFILE=`echo $POFILE | sed 's/\/en\//\/\//g' | sed 's/\/\//\//g'`
-    echo $GENERICFILE
-    # Get the filename only part of the po file so we can use that
-    # name when registering the resource
-    BASE=`basename $GENERICFILE .po`
-    BASE=`echo $BASE | sed 's/_/-/g' | sed 's/ /-/g'`
-    RESOURCE=inasafe-doc.${ITEM}-$BASE
-
-    #
-    # Register each po file as a transifex resource (an individual translatable file)
-    #
-    #set -x
-    #--source this is the source language from which other tranlations are done
-    #-t this is a PO file that contains translation strings
-    #--auto-local automatically add to the local .tx/config file
-    #GENERICFILE the pattern to use when matching for other translation files
-    #--source-lang the language of the original source file
-    # add it to the config file
-    #set -x
-    tx set --source \
-      -t PO \
-      -r $RESOURCE \
-      -l en "$GENERICFILE" \
-      --source-lang en \
-      --auto-local \
-      --execute \
-      --source-file=$POFILE
-    #set +x
-    # Now register
the language translations for the localised po file against - # this resource. - for LOCALE in $LOCALES - do - LOCALEFILE=`echo $POFILE | sed "s/\/en\//\/$LOCALE\//g"` - tx set -r $RESOURCE -l $LOCALE "$LOCALEFILE" - done - # When we are done in this block we should have created a section in the - # .tx/config file that looks like this: - # - # - # [inasafe-develop.user-docs-faq] - # file_filter = docs/i18n//LC_MESSAGES/user-docs/faq.po - # source_file = docs/i18n/en/LC_MESSAGES/user-docs/faq.po - # source_lang = en - # trans.id = docs/i18n/id/LC_MESSAGES/user-docs/faq.po - # type = PO - done -done - - -#Print out a listing of all registered resources -tx status - -# Push all the resources to the tx server -tx push -s diff --git a/scripts/generate_keywords.py b/scripts/generate_keywords.py deleted file mode 100644 index b2c9ed861..000000000 --- a/scripts/generate_keywords.py +++ /dev/null @@ -1,213 +0,0 @@ -# -*- coding: utf-8 -*- -"""Use IF keywords to generate keywords.rst from template. - -.. tip:: - The keywords_template.rst is rendered with data from Impact Functions - Metadata. This result is written otu to a keywords.rst -""" - -__author__ = 'Christian Christelis ' -__revision__ = '$Format:%H$' -__date__ = '06/06/2014' -__license__ = "GPL" -__copyright__ = 'Copyright 2014, Australia Indonesia Facility for ' -__copyright__ += 'Disaster Reduction' - - -import os -user_home = os.environ["HOME"] - -import sys -sys.path.append('%s/dev/python/inasafe' % user_home) - -from safe_qgis.safe_interface import get_plugins -from safe import get_version - -from rst_generation_tools import SimpleRstTableFormatter as SRTF -from rst_generation_tools import format_rst_paragraph -from jinja2 import Template -import os - - -class UtilityMixin(object): - """Utility Mixin for the extractor class.""" - @staticmethod - def _make_unique(original): - """Makes a list of unhashable objects unique and preserves the order""" - unique = [] - [unique.append(item) for item in original if item not in unique] - return unique - - -class MetadataExtractor(UtilityMixin): - """Helper class to extract our requeride metadata from the IF's metadata - - TODO: Add a rich description of which data is to be collected. - (The logic is quite closely tied to the output - """ - def __init__(self): - self.plugin_metadata = [] - for plugin in get_plugins().values(): - if hasattr(plugin, 'Metadata'): - self.plugin_metadata.append(plugin.Metadata().get_metadata()) - - def get_categories(self): - """Get a unique list of categories - - :rtype : list - :return: A list of categories as they are will appear in the table body. - """ - categories = [] - for metadata in self.plugin_metadata: - keys = metadata['categories'].keys() - for key in keys: - categories.append(key) - categories = list(set(categories)) - categories.sort() - return [['category', category] for category in categories] - - def _collect_metadata(self, category, category_detail='', constraint=None): - """Get a specic detail from a category - - :param category: The category constraint - :type category: str - - :param category_detail: The detail to be extracted from the category. - :type category_detail: str - - :param constraint: A dictionary defined constraint. 
- :return: dict, None - """ - detail_collection = [] - for metadata in self.plugin_metadata: - if category not in metadata['categories'].keys(): - continue - if constraint: - value = metadata['categories'][category][constraint['field']] - value = value if type(value) == list else [value] - values_match = any( - [(v['id'] in constraint['value']) for v in value]) - if not values_match: - continue - detail = metadata['categories'][category][category_detail] - if type(detail) != list: - detail = [detail] - for d in detail: - detail_collection.append(d) - return detail_collection - - def get_subcategories(self, category): - """Get a list of subcategories in a given category. - - :param category: the category constraint. e.g. hazard - :type category: str - - :return: list of subcategories - :rtype: list - """ - subcategory_data = self._collect_metadata( - category, category_detail='subcategory') - subcategory_names = [ - subcategory['name'] for subcategory in subcategory_data] - subcategory_names = list(set(subcategory_names)) - subcategory_names.sort() - return [ - ['subcategory', subcategory] for subcategory in subcategory_names] - - def get_units(self, category): - """Get all the units in a category. - - :param category: The category. - :type category: str - - :return: list of units - :rtype: list - """ - units_data = self._collect_metadata(category, category_detail='units') - units_names = [unit['name'] for unit in units_data] - units_names = list(set(units_names)) - units_names.sort() - return [['units', unit] for unit in units_names] - - def get_units_subcategory(self, category, subcategories): - """Get units for a given subcategory in a category. - - :param category: The category. - :type category: str - - :param subcategories: The subcategory. - :type subcategories: list - - :return: list of units - :rtype: list - """ - units_data = self._collect_metadata( - category, - category_detail='units', - constraint={'field': 'subcategory', 'value': subcategories}) - units = [unit for unit in units_data] - return self._make_unique(units) - - -if __name__ == "__main__": - me = MetadataExtractor() - category_table = SRTF(['Key', 'Allowed Values'], me.get_categories()) - subcategrory_hazard_table = SRTF( - ['Key', 'Allowed Values'], me.get_subcategories('hazard')) - subcategrory_exposure_table = SRTF( - ['Key', 'Allowed Values'], me.get_subcategories('exposure')) - - hazard_subcategories = [] - for subcategory in [['flood', 'tsunami'], ['volcano', 'tephra'], - ['earthquake']]: - units = me.get_units_subcategory('hazard', subcategory) - units_content = [['units', unit['name']] for unit in units] - units_description = [ - format_rst_paragraph( - unit['description'], - prefix=unit['name'] - ) for unit in units] - units_table = SRTF(['Key', 'Allowed Values'], units_content) - - hazard_subcategories.append({ - 'names': subcategory, - 'table': units_table(), - 'description': units_description}) - - exposure_subcategories = [] - for subcategory in [['population'], ['structure'], ['road']]: - units = me.get_units_subcategory('exposure', subcategory) - units_content = [['units', unit['name']] for unit in units] - units_description = [ - format_rst_paragraph( - unit['description'], - prefix=unit['name'] - ) for unit in units] - units_table = SRTF(['Key', 'Allowed Values'], units_content) - - exposure_subcategories.append({ - 'names': subcategory, - 'table': units_table(), - 'description': units_description}) - - context = { - 'category_table': category_table(), - 'subcategrory_hazard_table': 
subcategrory_hazard_table(), - 'subcategrory_exposure_table': subcategrory_exposure_table(), - 'hazard_subcategories': hazard_subcategories, - 'exposure_subcategories': exposure_subcategories, - 'version': get_version() - } - - file_path = os.path.dirname(os.path.realpath(__file__)) - template_location = os.path.join( - file_path, 'templates', 'keywords_template.rst') - with open(template_location) as fd: - template = Template(fd.read()) - - inasafe_doc_root = os.path.dirname(file_path) - destination_location = os.path.join( - inasafe_doc_root, - 'docs/source/user-docs/application-help/keywords.rst') - with open(destination_location, 'w') as fd: - fd.write(template.render(**context)) diff --git a/scripts/post_translate.sh b/scripts/post_translate.sh deleted file mode 100755 index 77e3dd7b1..000000000 --- a/scripts/post_translate.sh +++ /dev/null @@ -1,172 +0,0 @@ -#!/usr/bin/env bash -# Based off the script from QGIS by Tim Sutton and Richard Duivenvoorde - -# Name of the dir containing static files -STATIC=_static -# Path to the documentation root relative to script execution dir -DOCROOT=docs -# Path from execution dir of this script to docs sources (could be just -# '' depending on how your sphinx project is set up). -SOURCE=source - -pushd . -cd $DOCROOT - -SPHINXBUILD=`which sphinx-build` -TEXI2PDF=`which texi2pdf` - -# GENERATE PDF AND HTML FOR FOLLOWING LOCALES (EN IS ALWAYS GENERATED) -LOCALES='id fr' - -if [ $1 ]; then - LOCALES=$1 -fi - -BUILDDIR=build -# be sure to remove an old build dir -rm -rf ${BUILDDIR} -mkdir -p ${BUILDDIR} - -# output dirs -PDFDIR=`pwd`/output/pdf -HTMLDIR=`pwd`/output/html -mkdir -p ${PDFDIR} -mkdir -p ${HTMLDIR} - -VERSION=`cat source/conf.py | grep "version = '.*'" | grep -o "[0-9]\.[0-9]"` - -if [[ $1 = "en" ]]; then - echo "Not running localization for English." -else - for LOCALE in ${LOCALES} - do - for POFILE in `find i18n/${LOCALE}/LC_MESSAGES/ -type f -name '*.po'` - do - MOFILE=`echo ${POFILE} | sed -e 's,\.po,\.mo,'` - # Compile the translated strings - echo "Compiling messages to ${MOFILE}" - msgfmt --statistics -o ${MOFILE} ${POFILE} - done - done -fi - -# We need to flush the build dir or the translations don't come through -rm -rf ${BUILDDIR} -mkdir ${BUILDDIR} -#Add english to the list and generated docs -#LOCALES+=' en' - -if [ $1 ]; then - LOCALES=$1 -fi - -for LOCALE in ${LOCALES} -# Compile the html docs for this locale -do - # cleanup all images for the other locale - rm -rf source/static - mkdir -p source/static - # copy english (base) resources to the static dir - cp -r resources/en/* source/static - # now overwrite possible available (localised) resources over the english ones - cp -r resources/${LOCALE}/* source/static - - ################################# - # - # HTML Generation - # - ################################# - # Now prepare the index/irchat-[locale] template which is a manually translated, - # unique per locale page that gets copied to index.html/irchat.html for the doc - # generation process. - cp templates/index-${LOCALE}.html templates/index.html - cp templates/irchat-${LOCALE}.html templates/irchat.html - - echo "Building HTML for locale '${LOCALE}'..." - LOG=/tmp/sphinx$$.log - # -n Run in nit-picky mode. Currently, this generates warnings for all missing references. - # -W Turn warnings into errors. This means that the build stops at the first warning and sphinx-build exits with exit status 1. 
- #${SPHINXBUILD} -nW -d ${BUILDDIR}/doctrees -D language=${LOCALE} -b html source ${HTMLDIR}/${LOCALE} > $LOG - ${SPHINXBUILD} -d ${BUILDDIR}/doctrees -D language=${LOCALE} -b html source ${HTMLDIR}/${LOCALE} > $LOG - WARNINGS=`cat $LOG | grep warning` - ERRORS=`cat $LOG | grep ERROR` - if [[ $WARNINGS ]] - then - echo "***********************************************" - echo "* Sphinx build produces warnings - Please fix *" - echo $WARNINGS - echo "***********************************************" - exit 1 - fi - if [[ $ERRORS ]] - then - echo "*********************************************" - echo "* Sphinx build produces errors - Please fix *" - echo $ERRORS - echo "*********************************************" - exit 1 - fi - - # Remove the static html copy again - rm templates/index.html - rm templates/irchat.html - - # hack to avoid error when using Search in contents.html - rpl -q '#/../search.html' 'search.html' ./output/html/${LOCALE}/index.html - # same applies for having the IRC-Chat Navigation Link - rpl -q '#/../irchat.html' 'irchat.html' ./output/html/${LOCALE}/index.html - -# defaulting to generation of PDF too -# but to make travis build only html, add html as second parameter (see .travis.yml) -CREATE_PDF=true -if [[ $2 == html ]]; then - CREATE_PDF=false -fi - -if $CREATE_PDF; then - - ################################# - # - # PDF Generation - # - ################################# - # experimental sphinxbuild using rst2pdf... - #${SPHINXBUILD} -d ${BUILDDIR}/doctrees -D language=${LOCALE} -b pdf source ${BUILDDIR}/latex/${LOCALE} - - # Traditional using texi2pdf.... - # Compile the latex docs for that locale - # -n Run in nit-picky mode. Currently, this generates warnings for all missing references. - # -W Turn warnings into errors. This means that the build stops at the first warning and sphinx-build exits with exit status 1. - #${SPHINXBUILD} -nW -d ${BUILDDIR}/doctrees -D language=${LOCALE} -b latex source ${BUILDDIR}/latex/${LOCALE} > /dev/null 2>&1 - ${SPHINXBUILD} -n -d ${BUILDDIR}/doctrees -D language=${LOCALE} -b latex source ${BUILDDIR}/latex/${LOCALE} > /dev/null 2>&1 - # Compile the pdf docs for that locale - # we use texi2pdf since latexpdf target is not available via - # sphinx-build which we need to use since we need to pass language flag - pushd . - cp resources/InaSAFE_footer.png ${BUILDDIR}/latex/${LOCALE}/ - cd ${BUILDDIR}/latex/${LOCALE}/ - # Manipulate our latex a little - first add a standard footer - - FOOTER1="\usepackage{wallpaper}" - FOOTER2="\LRCornerWallPaper{1}{InaSAFE_footer.png}" - - # need to build 3x to have proper toc and index - if [ -z $TEXI2PDF ] - then - echo You do not have texinfo package installed. Please install! 
- exit 1 - fi - - texi2pdf --quiet InaSAFE-Documentation.tex > /dev/null 2>&1 - texi2pdf --quiet InaSAFE-Documentation.tex > /dev/null 2>&1 - texi2pdf --quiet InaSAFE-Documentation.tex > /dev/null 2>&1 - mv InaSAFE-Documentation.pdf ${PDFDIR}/InaSAFE-${VERSION}-Documentation-${LOCALE}.pdf - popd -fi - -done - -rm -rf source/static -rm -rf ${BUILDDIR} - -popd diff --git a/scripts/pre_translate.sh b/scripts/pre_translate.sh deleted file mode 100755 index 738b21327..000000000 --- a/scripts/pre_translate.sh +++ /dev/null @@ -1,99 +0,0 @@ -#!/usr/bin/env bash - -# INASAFE_DEV_PATH=$HOME/dev/python/inasafe/ -# export QGIS_PREFIX_PATH=/usr/local/qgis-2.8/ - -# if [ -d $INASAFE_DEV_PATH ] -# then -# export INASAFE_DEV_PATH=$HOME/dev/python/inasafe/ -# else -# echo Please set INASAFE_DEV_PATH as PATH to your local -# echo clone of inasafe repository inside this script -# exit 1 -# fi - -# export LD_LIBRARY_PATH=$QGIS_PREFIX_PATH/lib -# export PYTHONPATH=$QGIS_PREFIX_PATH/share/qgis/python:$QGIS_PREFIX_PATH/share/qgis/python/plugins:$INASAFE_DEV_PATH:$PYTHONPATH -# export QGIS_DEBUG=0 -# export QGIS_LOG_FILE=/dev/null -# export QGIS_DEBUG_FILE=/dev/null - -# Path to the documentation root relative to script execution dir -DOCROOT=docs -# Path from execution dir of this script to docs sources (could be just -# '' depending on how your sphinx project is set up). -SOURCE=source -# Name of the dir containing static files -STATIC=static - -LOCALES='id fr' - -if [ $1 ]; then - LOCALES=$1 -fi - -pushd . -cd $DOCROOT - -# Create / update the translation catalogue - this will generate the master .pot files -mkdir -p i18n/pot -# Create a (temporary) static directory in source to hold all (localised ) static content -mkdir -p source/static - -# copy english resources to static to be able to do a proper sphinx-build -cp -r resources/en/* source/static/ - -rm -rf - -BUILDDIR=build -# be sure to remove an old build dir -rm -rf ${BUILDDIR} -mkdir ${BUILDDIR} - -# Create / update the translation catalogue - this will generate the master -# .pot files -sphinx-build -d ${BUILDDIR}/doctrees -b gettext $SOURCE i18n/pot/ - -# We do not want the developer-docs/api-docs being translated so take them out of here -rm -rf i18n/pot/developer-docs -rm -rf i18n/pot/api-docs - -# Now iteratively update the locale specific .po files with any new strings -# needed translation -for LOCALE in ${LOCALES} -do - echo "Updating translation catalog for ${LOCALE}:" - echo "------------------------------------" - mkdir -p i18n/${LOCALE}/LC_MESSAGES - # cleanup images from static (different locales can have different localized images) - rm -rf source/static/* - # Clone the en resources and then overwrite with any localised versions of the same files. 
- cp -r resources/en/* source/static/ - PODIR=resources/${LOCALE} - if [ -d $PODIR ]; - then - cp -r ${PODIR}/* source/static/ - fi - - # Merge or copy all the updated pot files over to locale specific po files - for FILE in `find i18n/pot/ -type f` - do - POTFILE=${FILE} - POFILE=`echo ${POTFILE} | sed -e 's,\.pot,\.po,' | sed -e 's,pot,'${LOCALE}'/LC_MESSAGES,'` - if [ -f $POFILE ]; - then - echo "Updating strings for ${POFILE}" - msgmerge -U ${POFILE} ${POTFILE} - else - echo "Creating ${POFILE}" - mkdir -p `echo $(dirname ${POFILE})` - cp ${POTFILE} ${POFILE} - fi - done -done - -# Now get rid of temporary POT files -rm -rf i18n/pot -rm -rf source/static - -popd diff --git a/scripts/rst_generation_tools.py b/scripts/rst_generation_tools.py deleted file mode 100644 index 01c5fadb3..000000000 --- a/scripts/rst_generation_tools.py +++ /dev/null @@ -1,217 +0,0 @@ -# -*- coding: utf-8 -*- -"""General tools for formatting rst. - -.. tip:: - Common tools used when generating rst formatted data. -""" - -__author__ = 'Christian Christelis ' -__revision__ = '$Format:%H$' -__date__ = '06/06/2014' -__license__ = "GPL" -__copyright__ = 'Copyright 2014, Australia Indonesia Facility for ' -__copyright__ += 'Disaster Reduction' - -from textwrap import wrap - - -class HorizontalBorder(object): - """Manage the border elements of a simple rst table those are the: ====== - """ - def __init__(self, default_length=5): - """Constructor - - :param default_length: The default (min) character length of the column - :type default_length: int - """ - self.length = default_length - - def column_width(self, column_content): - """Determine the column width, by passing the column content. - - :param column_content: The content of the column - :type column_content: list - """ - for entry in column_content: - if isinstance(entry, basestring): - self.length = max(len(entry), self.length) - else: - self.length = max(len('%s' % entry), self.length) - - def __call__(self): - """Generate the horizontal border for a given column - - :return: sequence of '=' with an offset - :rtype: str - """ - return '%s ' % ('=' * self.length) - - -class SimpleRstTableFormatter(object): - """Create a simple rst table the table conforms to: - - .. table:: - - =========== ============== - Key Allowed Values - =========== ============== - subcategory tsunami - subcategory flood - subcategory volcano - subcategory earthquake - =========== ============== - - """ - def __init__(self, heading=None, rows=None): - """Instantiate the object with optionally heading and rows. - - :param heading: The heading - :type heading: list, None - - :param rows: The rows - :type rows: list, None - """ - if not heading: - heading = [] - else: - self.heading = self._stringify(heading) - if not rows: - rows = [] - else: - self.rows = [self._stringify(row) for row in rows] - - def add_heading(self, heading): - """Add a heading to this table. - - :param heading: The heading. - :type heading; list - """ - self.heading = self._stringify(heading) - - def add_row(self, row): - """Add a row to this table. - - :param row: The row. 
- :type row: list - """ - self.rows.append(self._stringify(row)) - @staticmethod - def _stringify(row): - """Convert the objects in the row to strings - - :param row: The row - :type row: list - - :return: Row with each element formatted to a string - :rtype: list - """ - return ['%s' % r for r in row] - - @staticmethod - def _left_fill(row, boarders): - """Fill each element of the row to the same size as the row of borders - - :param row: The row to be updated - :type row: list - - :param boarders: The row of borders - :type: list - - :return: The update row. - :rtype: list - """ - return [r.ljust(len(b)) for (r, b) in zip(row, boarders)] - - def __call__(self): - """ Get the table - - :return: RST formatted table. - :rtype: str - """ - if not self.heading or not self.rows: - raise ReferenceError - column_count = len(self.heading) - borders = [] - for count in range(column_count): - border = HorizontalBorder() - column = [self.heading[count]] + [row[count] for row in self.rows] - border.column_width(column) - borders.append(border()) - table = '.. table::\n\n' - for row in [borders, self.heading, borders] + self.rows + [borders]: - row = self._left_fill(row, borders) - table += ' %s\n' % ' '.join(row) - return table - - -def uniform_markers(text, markers, placeholder=None): - """Make all markers in the text the same. - - :param text: The text to be updated. - :type text: str - - :param markers: The markers to be made uniform. - :type markers: list - - :param placeholder: The optional placeholder marker - :type placeholder: str, None - - :return: The updated text. - :rtype: str - """ - placeholder = placeholder or markers[0] - for marker in markers[1:]: - text = text.replace(marker, placeholder) - return text - - -def replace_bold(text): - """Convert html bold to rst bold in text. - - :param text: The text to be updated. - :type text: str - - :return: The updated text. - :rtype: str - """ - text = uniform_markers(text, ['', '', '', '']) - return text.replace('', '**') - - -def replace_italic(text): - """Convert html italic to rst italic in text. - - :param text: The text to be updated. - :type text: str - - :return: The updated text. - :rtype: str - """ - text = uniform_markers(text, ['', '', '', '']) - return text.replace('', '*') - - -def format_rst_paragraph(paragraph, prefix=None, width=79): - """ Convert a paragraph, with html formatting to rst. - - :param paragraph: The paragraph to be formatted. - :type paragraph: str - - :param prefix: Text to highlight and prefix to paragraph. - :type prefix: str, None - - :param width: The width of the paragraph. - :type width: int - - :return: The formatted paragraph. - :rtype: str - """ - paragraph = replace_bold(paragraph) - paragraph = replace_italic(paragraph) - if prefix: - paragraph = '**%s**: %s' % (prefix, paragraph) - paragraph = wrap(paragraph, width) - return '\n'.join(paragraph) - - - diff --git a/scripts/update_all_dev.sh b/scripts/update_all_dev.sh deleted file mode 100755 index cd7e48cda..000000000 --- a/scripts/update_all_dev.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -git pull -tx pull -./scripts/pre_translate.sh -./scripts/pre_translate.sh en -./scripts/create_transifex_resources.sh -./scripts/post_translate.sh
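
For local testing outside CI, the English-only build introduced by this patch can be exercised roughly as follows. This is a minimal sketch based only on commands that already appear above (the pinned nix shell from default.nix, the new build script, and the httplz preview command from the README); note that the workflow invokes scripts/english_build.sh while the patch creates the script at the repository root, so the path below assumes the root location.

```bash
# Build the English docs inside the pinned environment from default.nix
nix-shell --run "./english_build.sh"

# Serve the generated HTML locally, as described in the README
nix-shell --run "httplz docs/output/html/en/"
```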