From 14385fa0cc0317debc6ac577fb388fbf55cf4957 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Marcus=20=C3=96sterberg?=
Date: Sat, 5 Dec 2020 21:24:59 +0100
Subject: [PATCH 01/49] Language files didn't make the cut when tests moved to
 separate files

@flowertwig-org I still cannot compile pot-files and I'm not sure whether
you give the relative path to the files the way I do. Please check it out.
---
 locales/en/LC_MESSAGES/webperf-core.po | 12 ++++++------
 locales/sv/LC_MESSAGES/webperf-core.po | 12 ++++++------
 locales/webperf-core.pot | 12 ++++++------
 3 files changed, 18 insertions(+), 18 deletions(-)

diff --git a/locales/en/LC_MESSAGES/webperf-core.po b/locales/en/LC_MESSAGES/webperf-core.po
index 83907840..fb0de5f5 100644
--- a/locales/en/LC_MESSAGES/webperf-core.po
+++ b/locales/en/LC_MESSAGES/webperf-core.po
@@ -15,27 +15,27 @@ msgstr ""
 "Generated-By: pygettext.py 1.5\n"
 
-#: checks.py:47
+#: tests/page_not_found.py:35
 msgid "TEST_404_REVIEW_WRONG_STATUS_CODE"
 msgstr "* Wrong status code. Got {0} when 404 would be correct.\n"
 
-#: checks.py:67
+#: tests/page_not_found.py:55
 msgid "TEST_404_REVIEW_NO_TITLE"
 msgstr "* Found no page title in the page metadata.\n"
 
-#: checks.py:77
+#: tests/page_not_found.py:65
 msgid "TEST_404_REVIEW_MAIN_HEADER"
 msgstr "* Found no headline (h1)\n"
 
-#: checks.py:135
+#: tests/page_not_found.py:124
 msgid "TEST_404_REVIEW_NO_SWEDISH_ERROR_MSG"
 msgstr "* Seems to lack text describing that an error has occurred (in Swedish).\n"
 
-#: checks.py:143
+#: tests/page_not_found.py:131
 msgid "TEST_404_REVIEW_ERROR_MSG_UNDER_150"
 msgstr "* Text content length is below 150 characters, indicating that the user is not being referred.\n"
 
-#: checks.py:146
+#: tests/page_not_found.py:134
 msgid "TEST_REVIEW_NO_REMARKS"
 msgstr "* No remarks"
 
diff --git a/locales/sv/LC_MESSAGES/webperf-core.po b/locales/sv/LC_MESSAGES/webperf-core.po
index fc21f098..50af6efb 100644
--- a/locales/sv/LC_MESSAGES/webperf-core.po
+++ b/locales/sv/LC_MESSAGES/webperf-core.po
@@ -15,27 +15,27 @@ msgstr ""
 "Generated-By: pygettext.py 1.5\n"
 
-#: checks.py:47
+#: tests/page_not_found.py:35
 msgid "TEST_404_REVIEW_WRONG_STATUS_CODE"
 msgstr "* Fel statuskod. Fick {0} när 404 vore korrekt.\n"
 
-#: checks.py:67
+#: tests/page_not_found.py:55
 msgid "TEST_404_REVIEW_NO_TITLE"
 msgstr "* Hittade ingen titel på sidan\n"
 
-#: checks.py:77
+#: tests/page_not_found.py:65
 msgid "TEST_404_REVIEW_MAIN_HEADER"
 msgstr "* Hittade ingen huvudrubrik (h1)\n"
 
-#: checks.py:135
+#: tests/page_not_found.py:124
 msgid "TEST_404_REVIEW_NO_SWEDISH_ERROR_MSG"
 msgstr "* Verkar sakna text som beskriver att ett fel uppstått (på svenska).\n"
 
-#: checks.py:143
+#: tests/page_not_found.py:131
 msgid "TEST_404_REVIEW_ERROR_MSG_UNDER_150"
 msgstr "* Information är under 150 tecken, vilket tyder på att användaren inte vägleds vidare.\n"
 
-#: checks.py:146
+#: tests/page_not_found.py:134
 msgid "TEST_REVIEW_NO_REMARKS"
 msgstr "* Inga anmärkningar."
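A note on the compile step that is failing here: the .po sources above are
compiled into the binary .mo catalogs that Python's gettext loads at runtime.
A minimal sketch, assuming GNU gettext's msgfmt is installed and everything
runs from the repository root (the msgfmt call is a shell command, shown as a
comment):

    # Hedged compile-and-verify sketch for the catalogs above.
    # Compile step (shell, assumes GNU gettext is installed):
    #   msgfmt locales/sv/LC_MESSAGES/webperf-core.po \
    #          -o locales/sv/LC_MESSAGES/webperf-core.mo
    import gettext

    # Load the compiled catalog and resolve one of the keys from the diff above.
    t = gettext.translation('webperf-core', localedir='locales', languages=['sv'])
    print(t.gettext('TEST_REVIEW_NO_REMARKS'))  # expected: '* Inga anmärkningar.'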
diff --git a/locales/webperf-core.pot b/locales/webperf-core.pot index 035f6f56..8573e50b 100644 --- a/locales/webperf-core.pot +++ b/locales/webperf-core.pot @@ -15,27 +15,27 @@ msgstr "" "Generated-By: pygettext.py 1.5\n" -#: checks.py:47 +#: tests/page_not_found.py:35 msgid "TEST_404_REVIEW_WRONG_STATUS_CODE" msgstr "" -#: checks.py:67 +#: tests/page_not_found.py:55 msgid "TEST_404_REVIEW_NO_TITLE" msgstr "" -#: checks.py:77 +#: tests/page_not_found.py:65 msgid "TEST_404_REVIEW_MAIN_HEADER" msgstr "" -#: checks.py:135 +#: tests/page_not_found.py:124 msgid "TEST_404_REVIEW_NO_SWEDISH_ERROR_MSG" msgstr "" -#: checks.py:143 +#: tests/page_not_found.py:131 msgid "TEST_404_REVIEW_ERROR_MSG_UNDER_150" msgstr "" -#: checks.py:146 +#: tests/page_not_found.py:134 msgid "TEST_REVIEW_NO_REMARKS" msgstr "" From e330dab1e8f61d487709620507f4a80f9c1af7f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marcus=20=C3=96sterberg?= Date: Sat, 5 Dec 2020 21:25:04 +0100 Subject: [PATCH 02/49] Create lighthouse_a11y.py --- tests/lighthouse_a11y.py | 99 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 99 insertions(+) create mode 100644 tests/lighthouse_a11y.py diff --git a/tests/lighthouse_a11y.py b/tests/lighthouse_a11y.py new file mode 100644 index 00000000..df5df07b --- /dev/null +++ b/tests/lighthouse_a11y.py @@ -0,0 +1,99 @@ +#-*- coding: utf-8 -*- +import sys +import socket +import ssl +import json +import requests +import urllib # https://docs.python.org/3/library/urllib.parse.html +import uuid +import re +from bs4 import BeautifulSoup +import config +from tests.utils import * + +### DEFAULTS +request_timeout = config.http_request_timeout +googlePageSpeedApiKey = config.googlePageSpeedApiKey + +def run_test(url, strategy='mobile', category='performance'): + """ + perf = https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category=performance&strategy=mobile&url=https://webperf.se&key=AIzaSyAZRZtljuro4yWC0QpVvOubtkcXLPOL0cw + a11y = https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category=accessibility&strategy=mobile&url=https://webperf.se&key=AIzaSyAZRZtljuro4yWC0QpVvOubtkcXLPOL0cw + practise = https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category=best-practices&strategy=mobile&url=https://webperf.se&key=AIzaSyAZRZtljuro4yWC0QpVvOubtkcXLPOL0cw + pwa = https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category=pwa&strategy=mobile&url=https://webperf.se&key=AIzaSyAZRZtljuro4yWC0QpVvOubtkcXLPOL0cw + seo = https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category=seo&strategy=mobile&url=https://webperf.se&key=AIzaSyAZRZtljuro4yWC0QpVvOubtkcXLPOL0cw + """ + check_url = url.strip() + + pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&key={2}'.format(category, check_url, googlePageSpeedApiKey) + + get_content = '' + + try: + get_content = httpRequestGetContent(pagespeed_api_request) + except: # breaking and hoping for more luck with the next URL + print( + 'Error! Unfortunately the request for URL "{0}" failed, message:\n{1}'.format( + check_url, sys.exc_info()[0])) + pass + + json_content = '' + + try: + json_content = json.loads(get_content) + except: # might crash if checked resource is not a webpage + print('Error! 
JSON failed parsing for the URL "{0}"\nMessage:\n{1}'.format( + check_url, sys.exc_info()[0])) + pass + + return_dict = {} + + score = 0 + fails = 0 + fail_dict = {} + + for item in json_content['lighthouseResult']['audits'].keys(): + try: + return_dict[item] = json_content['lighthouseResult']['audits'][item]['score'] + + score = score + int(json_content['lighthouseResult']['audits'][item]['score']) + + if int(json_content['lighthouseResult']['audits'][item]['score']) is 0: + fails += 1 + fail_dict[item] = json_content['lighthouseResult']['audits'][item]['title'] + except: + # has no 'numericValue' + #print(item, 'har inget värde') + pass + + review = '' + points = 0 + + if fails == 0: + points = 5 + review = '* Webbplatsen har inga uppenbara fel inom tillgänglighet!\n' + elif fails <= 2: + points = 4 + review = '* Webbplatsen kan bli mer tillgänglig, men är helt ok.\n' + elif fails <= 3: + points = 3 + review = '* Genomsnittlig tillgänglighet men behöver bli bättre.\n' + elif fails <= 5: + points = 2 + review = '* Webbplatsen är dålig på tillgänglighet för funktions­varierade personer.\n' + elif fails > 5: + points = 1 + review = '* Väldigt dålig tillgänglighet!\n' + + review += '* Antal problem med tillgänglighet: {} st\n'.format(fails) + + + if fails is not 0: + review += '\nTillgänglighets­problem:\n' + + for key, value in return_dict.items(): + if value is 0: + review += '* {}\n'.format(fail_dict[key]) + #print(key) + + return (points, review, return_dict) From 799500c034c9fb0ce0e2cb05d12e88bb895f22cb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marcus=20=C3=96sterberg?= Date: Sat, 5 Dec 2020 21:32:37 +0100 Subject: [PATCH 03/49] Lighthouse tests --- tests/lighthouse_a11y.py | 9 +-- tests/lighthouse_best_practice.py | 92 +++++++++++++++++++++++++++++++ tests/lighthouse_pwa.py | 92 +++++++++++++++++++++++++++++++ tests/lighthouse_seo.py | 92 +++++++++++++++++++++++++++++++ 4 files changed, 277 insertions(+), 8 deletions(-) create mode 100644 tests/lighthouse_best_practice.py create mode 100644 tests/lighthouse_pwa.py create mode 100644 tests/lighthouse_seo.py diff --git a/tests/lighthouse_a11y.py b/tests/lighthouse_a11y.py index df5df07b..de6cd785 100644 --- a/tests/lighthouse_a11y.py +++ b/tests/lighthouse_a11y.py @@ -15,14 +15,7 @@ request_timeout = config.http_request_timeout googlePageSpeedApiKey = config.googlePageSpeedApiKey -def run_test(url, strategy='mobile', category='performance'): - """ - perf = https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category=performance&strategy=mobile&url=https://webperf.se&key=AIzaSyAZRZtljuro4yWC0QpVvOubtkcXLPOL0cw - a11y = https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category=accessibility&strategy=mobile&url=https://webperf.se&key=AIzaSyAZRZtljuro4yWC0QpVvOubtkcXLPOL0cw - practise = https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category=best-practices&strategy=mobile&url=https://webperf.se&key=AIzaSyAZRZtljuro4yWC0QpVvOubtkcXLPOL0cw - pwa = https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category=pwa&strategy=mobile&url=https://webperf.se&key=AIzaSyAZRZtljuro4yWC0QpVvOubtkcXLPOL0cw - seo = https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category=seo&strategy=mobile&url=https://webperf.se&key=AIzaSyAZRZtljuro4yWC0QpVvOubtkcXLPOL0cw - """ +def run_test(url, strategy='mobile', category='accessibility'): check_url = url.strip() pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&key={2}'.format(category, check_url, googlePageSpeedApiKey) 
diff --git a/tests/lighthouse_best_practice.py b/tests/lighthouse_best_practice.py new file mode 100644 index 00000000..101ac320 --- /dev/null +++ b/tests/lighthouse_best_practice.py @@ -0,0 +1,92 @@ +#-*- coding: utf-8 -*- +import sys +import socket +import ssl +import json +import requests +import urllib # https://docs.python.org/3/library/urllib.parse.html +import uuid +import re +from bs4 import BeautifulSoup +import config +from tests.utils import * + +### DEFAULTS +request_timeout = config.http_request_timeout +googlePageSpeedApiKey = config.googlePageSpeedApiKey + +def run_test(url, strategy='mobile', category='best-practices'): + check_url = url.strip() + + pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&strategy={2}&key={3}'.format(category, check_url, strategy, googlePageSpeedApiKey) + + get_content = '' + + try: + get_content = httpRequestGetContent(pagespeed_api_request) + except: # breaking and hoping for more luck with the next URL + print( + 'Error! Unfortunately the request for URL "{0}" failed, message:\n{1}'.format( + check_url, sys.exc_info()[0])) + pass + + json_content = '' + + try: + json_content = json.loads(get_content) + except: # might crash if checked resource is not a webpage + print('Error! JSON failed parsing for the URL "{0}"\nMessage:\n{1}'.format( + check_url, sys.exc_info()[0])) + pass + + return_dict = {} + + score = 0 + fails = 0 + fail_dict = {} + + for item in json_content['lighthouseResult']['audits'].keys(): + try: + return_dict[item] = json_content['lighthouseResult']['audits'][item]['score'] + + score = score + int(json_content['lighthouseResult']['audits'][item]['score']) + + if int(json_content['lighthouseResult']['audits'][item]['score']) is 0: + fails += 1 + fail_dict[item] = json_content['lighthouseResult']['audits'][item]['title'] + except: + # has no 'numericValue' + #print(item, 'har inget värde') + pass + + review = '' + points = 0 + + if fails == 0: + points = 5 + review = '* Webbplatsen följer god praxis fullt ut!\n' + elif fails <= 2: + points = 4 + review = '* Webbplatsen har ändå förbättrings­potential.\n' + elif fails <= 3: + points = 3 + review = '* Genomsnittlig efterlevnad till praxis.\n' + elif fails <= 4: + points = 2 + review = '* Webbplatsen är ganska dålig på att följa god praxis.\n' + elif fails > 4: + points = 1 + review = '* Webbplatsen är inte alls bra på att följa praxis!\n' + + review += '* Antal problem med god praxis: {} st\n'.format(fails) + + + if fails is not 0: + review += '\nProblem:\n' + + for key, value in return_dict.items(): + if value is 0: + review += '* {}\n'.format(fail_dict[key]) + #print(key) + + return (points, review, return_dict) diff --git a/tests/lighthouse_pwa.py b/tests/lighthouse_pwa.py new file mode 100644 index 00000000..6ab9281f --- /dev/null +++ b/tests/lighthouse_pwa.py @@ -0,0 +1,92 @@ +#-*- coding: utf-8 -*- +import sys +import socket +import ssl +import json +import requests +import urllib # https://docs.python.org/3/library/urllib.parse.html +import uuid +import re +from bs4 import BeautifulSoup +import config +from tests.utils import * + +### DEFAULTS +request_timeout = config.http_request_timeout +googlePageSpeedApiKey = config.googlePageSpeedApiKey + +def run_test(url, strategy='mobile', category='pwa'): + check_url = url.strip() + + pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&key={2}'.format(category, check_url, googlePageSpeedApiKey) + + get_content = '' + + try: + 
get_content = httpRequestGetContent(pagespeed_api_request) + except: # breaking and hoping for more luck with the next URL + print( + 'Error! Unfortunately the request for URL "{0}" failed, message:\n{1}'.format( + check_url, sys.exc_info()[0])) + pass + + json_content = '' + + try: + json_content = json.loads(get_content) + except: # might crash if checked resource is not a webpage + print('Error! JSON failed parsing for the URL "{0}"\nMessage:\n{1}'.format( + check_url, sys.exc_info()[0])) + pass + + return_dict = {} + + score = 0 + fails = 0 + fail_dict = {} + + for item in json_content['lighthouseResult']['audits'].keys(): + try: + return_dict[item] = json_content['lighthouseResult']['audits'][item]['score'] + + score = score + int(json_content['lighthouseResult']['audits'][item]['score']) + + if int(json_content['lighthouseResult']['audits'][item]['score']) is 0: + fails += 1 + fail_dict[item] = json_content['lighthouseResult']['audits'][item]['title'] + except: + # has no 'numericValue' + #print(item, 'har inget värde') + pass + + review = '' + points = 0 + + if fails == 0: + points = 5 + #review = '* Webbplatsen följer fullt ut praxis för progressiva webbappar!\n' + elif fails <= 4: + points = 4 + #review = '* Webbplatsen har lite förbättrings­potential för en progressiv webbapp.\n' + elif fails <= 7: + points = 3 + #review = '* Genomsnittlig efterlevnad till praxis för progressiva webbappar.\n' + elif fails <= 9: + points = 2 + #review = '* Webbplatsen är ganska dålig som progressiv webbapp.\n' + elif fails > 9: + points = 1 + #review = '* Webbplatsen är inte alls bra som progressiv webbapp :/\n' + + review += '* Antal problem med praxis för progressiva webbappar: {} st\n'.format(fails) + + + if fails is not 0: + review += '\nProblem:\n' + + for key, value in return_dict.items(): + if value is 0: + review += '* {}\n'.format(fail_dict[key]) + #print(key) + + return (points, review, return_dict) diff --git a/tests/lighthouse_seo.py b/tests/lighthouse_seo.py new file mode 100644 index 00000000..f55be944 --- /dev/null +++ b/tests/lighthouse_seo.py @@ -0,0 +1,92 @@ +#-*- coding: utf-8 -*- +import sys +import socket +import ssl +import json +import requests +import urllib # https://docs.python.org/3/library/urllib.parse.html +import uuid +import re +from bs4 import BeautifulSoup +import config +from tests.utils import * + +### DEFAULTS +request_timeout = config.http_request_timeout +googlePageSpeedApiKey = config.googlePageSpeedApiKey + +def run_test(url, strategy='mobile', category='seo'): + check_url = url.strip() + + pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&key={2}'.format(category, check_url, googlePageSpeedApiKey) + + get_content = '' + + try: + get_content = httpRequestGetContent(pagespeed_api_request) + except: # breaking and hoping for more luck with the next URL + print( + 'Error! Unfortunately the request for URL "{0}" failed, message:\n{1}'.format( + check_url, sys.exc_info()[0])) + pass + + json_content = '' + + try: + json_content = json.loads(get_content) + except: # might crash if checked resource is not a webpage + print('Error! 
JSON failed parsing for the URL "{0}"\nMessage:\n{1}'.format( + check_url, sys.exc_info()[0])) + pass + + return_dict = {} + + score = 0 + fails = 0 + fail_dict = {} + + for item in json_content['lighthouseResult']['audits'].keys(): + try: + return_dict[item] = json_content['lighthouseResult']['audits'][item]['score'] + + score = score + int(json_content['lighthouseResult']['audits'][item]['score']) + + if int(json_content['lighthouseResult']['audits'][item]['score']) is 0: + fails += 1 + fail_dict[item] = json_content['lighthouseResult']['audits'][item]['title'] + except: + # has no 'numericValue' + #print(item, 'har inget värde') + pass + + review = '' + points = 0 + + if fails == 0: + points = 5 + review = '* Webbplatsen följer god SEO-praxis fullt ut!\n' + elif fails <= 2: + points = 4 + review = '* Webbplatsen har ändå förbättrings­potential inom SEO.\n' + elif fails <= 3: + points = 3 + review = '* Genomsnittlig efterlevnad till SEO-praxis.\n' + elif fails <= 4: + points = 2 + review = '* Webbplatsen är ganska dålig på sökmotoroptimering.\n' + elif fails > 4: + points = 1 + review = '* Webbplatsen är inte alls bra på sökmotoroptimering!\n' + + review += '* Antal problem med god praxis: {} st\n'.format(fails) + + + if fails is not 0: + review += '\nProblem:\n' + + for key, value in return_dict.items(): + if value is not None and value < 1: + review += '* {}\n'.format(fail_dict[key]) + #print(key) + + return (points, review, return_dict) From f09f598e1ebe71c8a9be13032ade93e44c3bb581 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marcus=20=C3=96sterberg?= Date: Sat, 5 Dec 2020 22:46:44 +0100 Subject: [PATCH 04/49] Lang files checked @flowertwig-org, please try compiling these and upload the mo-files. --- locales/en/LC_MESSAGES/webperf-core.po | 86 +++++++++++++++++------- locales/sv/LC_MESSAGES/webperf-core.po | 91 +++++++++++++++++++------- locales/webperf-core.pot | 83 ++++++++++++++++------- 3 files changed, 190 insertions(+), 70 deletions(-) diff --git a/locales/en/LC_MESSAGES/webperf-core.po b/locales/en/LC_MESSAGES/webperf-core.po index fb0de5f5..70049f39 100644 --- a/locales/en/LC_MESSAGES/webperf-core.po +++ b/locales/en/LC_MESSAGES/webperf-core.po @@ -39,79 +39,119 @@ msgstr "* Text content length is below 150 characters, indicating that the user msgid "TEST_REVIEW_NO_REMARKS" msgstr "* No remarks" -#: default.py:21 +#: default.py:24 msgid "TEXT_TEST_START_HEADER" msgstr "###############################################" -#: default.py:25 +#: default.py:28 msgid "TEXT_TESTING_NUMBER_OF_SITES" msgstr "Number of websites being tested {0}" -#: default.py:30 +#: default.py:33 msgid "TEXT_TESTING_SITE" msgstr "{0}. Testing website {1}" -#: default.py:46 +#: default.py:61 msgid "TEXT_SITE_RATING" msgstr "Rating: " -#: default.py:48 +#: default.py:63 msgid "TEXT_SITE_REVIEW" msgstr "Review:\n" -#: default.py:66 +#: default.py:81 msgid "TEXT_EXCEPTION" msgstr "Exception, someone should look at this!" 
-#: default.py:74
+#: default.py:89
 msgid "TEXT_TESTING_START_HEADER"
 msgstr "### {0} ###"
 
-#: default.py:78
-msgid "TEXT_TEST_GOOGLE_PAGESPEED"
-msgstr "###############################\nRunning test: 0 - Google Pagespeed"
+#: default.py:93
+msgid "TEST_GOOGLE_LIGHTHOUSE"
+msgstr "###############################\nRunning test: 0 - Google Lighthouse Performance"
 
-#: default.py:81
+#: default.py:96
+msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_A11Y"
+msgstr "###############################\nRunning test: 10 - Google Lighthouse Accessibility"
+
+#: default.py:99
+msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_SEO"
+msgstr "###############################\nRunning test: 4 - Google Lighthouse SEO"
+
+#: default.py:102
+msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_PWA"
+msgstr "###############################\nRunning test: 8 - Google Lighthouse PWA"
+
+#: default.py:105
+msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE"
+msgstr "###############################\nRunning test: 5 - Google Lighthouse Best Practice"
+
+#: default.py:107
 msgid "TEXT_TEST_PAGE_NOT_FOUND"
 msgstr "###############################\nRunning test: 2 - 404 (Page not Found)"
 
-#: default.py:84
+#: default.py:111
 msgid "TEXT_TEST_HTML"
 msgstr "###############################\nRunning test: 6 - HTML"
 
-#: default.py:87
+#: default.py:114
 msgid "TEXT_TEST_CSS"
 msgstr "###############################\nRunning test: 7 - CSS"
 
-#: default.py:90
+#: default.py:117
 msgid "TEXT_TEST_WEBBKOLL"
 msgstr "###############################\nRunning test: 20 - Webbkoll"
 
-#: default.py:96
+#: default.py:120
+msgid "TEXT_TEST_STANDARD_FILES"
+msgstr "###############################\nRunning test: 9 - Standard files"
+
+#: default.py:127
 msgid "TEXT_TEST_VALID_ARGUMENTS"
 msgstr "Valid arguments for option -t/--test:"
 
-#: default.py:97
-msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_PAGESPEED"
-msgstr "-t 0\t: Google Pagespeed"
+#: default.py:128
+msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE"
+msgstr "-t 1\t: Google Lighthouse Performance"
+
+#: default.py:129
+msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_SEO"
+msgstr "-t 4\t: Google Lighthouse SEO"
+
+#: default.py:130
+msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_A11Y"
+msgstr "-t 10\t: Google Lighthouse Accessibility"
 
-#: default.py:98
+#: default.py:131
+msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_PWA"
+msgstr "-t 8\t: Google Lighthouse PWA"
+
+#: default.py:132
+msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_BEST_PRACTICE"
+msgstr "-t 5\t: Google Lighthouse Best Practice"
+
+#: default.py:133
 msgid "TEXT_TEST_VALID_ARGUMENTS_PAGE_NOT_FOUND"
 msgstr "-t 2\t: 404 (Page not Found)"
 
-#: default.py:99
+#: default.py:134
 msgid "TEXT_TEST_VALID_ARGUMENTS_HTML"
 msgstr "-t 6\t: HTML"
 
-#: default.py:100
+#: default.py:135
 msgid "TEXT_TEST_VALID_ARGUMENTS_CSS"
 msgstr "-t 7\t: CSS"
 
-#: default.py:101
+#: default.py:136
 msgid "TEXT_TEST_VALID_ARGUMENTS_WEBBKOLL"
 msgstr "-t 20\t: Webbkoll"
 
+#: default.py:137
+msgid "TEXT_TEST_VALID_ARGUMENTS_STANDARD_FILES"
+msgstr "-t 9\t: Standard files"
+
 #: default.py:137 default.py:141 default.py:146 default.py:213
 msgid "TEXT_COMMAND_USAGE"
-msgstr "\n\tWebPerf Core\n\n\tUsage:\ndefault.py -u https://webperf.se\n\n\tOptions and arguments:\n\t-h/--help\t\t\t: Help information on how to use script\n\t-u/--url \t\t: website url to test against\n\t-t/--test <1/2/6/7/20>\t\t: runs ONE specific test against website(s)\n\t-r/--review\t\t\t: show reviews in terminal\n\t-i/--input \t\t: input file path (.json/.sqlite)\n\t-o/--output \t\t: output file path (.json/.csv/.sql/.sqlite)\n\t-A/--addUrl \t\t: website url (required in compination with -i/--input)\n\t-D/--deleteUrl \t: website url (required in compination with -i/--input)\n\t-L/--language \t: language used for output(en = default/sv)"
+msgstr "\n\tWebPerf Core\n\n\tUsage:\ndefault.py -u https://webperf.se\n\n\tOptions and arguments:\n\t-h/--help\t\t\t: Help information on how to use script\n\t-u/--url \t\t: website url to test against\n\t-t/--test <1/2/4/5/6/7/8/9/10/20>\t\t: runs ONE specific test against website(s)\n\t-r/--review\t\t\t: show reviews in terminal\n\t-i/--input \t\t: input file path (.json/.sqlite)\n\t-o/--output \t\t: output file path (.json/.csv/.sql/.sqlite)\n\t-A/--addUrl \t\t: website url (required in combination with -i/--input)\n\t-D/--deleteUrl \t: website url (required in combination with -i/--input)\n\t-L/--language \t: language used for output(en = default/sv)"

diff --git a/locales/sv/LC_MESSAGES/webperf-core.po b/locales/sv/LC_MESSAGES/webperf-core.po
index 50af6efb..8a36e9af 100644
--- a/locales/sv/LC_MESSAGES/webperf-core.po
+++ b/locales/sv/LC_MESSAGES/webperf-core.po
@@ -39,79 +39,119 @@ msgstr "* Information är under 150 tecken, vilket tyder på att användaren int
 msgid "TEST_REVIEW_NO_REMARKS"
 msgstr "* Inga anmärkningar."
 
-#: default.py:21
+#: default.py:24
 msgid "TEXT_TEST_START_HEADER"
 msgstr "###############################################"
 
-#: default.py:25
+#: default.py:28
 msgid "TEXT_TESTING_NUMBER_OF_SITES"
 msgstr "Webbadresser som testas {0}"
 
-#: default.py:30
+#: default.py:33
 msgid "TEXT_TESTING_SITE"
 msgstr "{0}. Testar adress {1}"
 
-#: default.py:46
+#: default.py:61
 msgid "TEXT_SITE_RATING"
 msgstr "Betyg: "
 
-#: default.py:48
+#: default.py:63
 msgid "TEXT_SITE_REVIEW"
 msgstr "Omdöme:\n"
 
-#: default.py:66
+#: default.py:81
 msgid "TEXT_EXCEPTION"
 msgstr "Fel, någon behöver ta en titt på detta."
-#: default.py:74 +#: default.py:89 msgid "TEXT_TESTING_START_HEADER" msgstr "### {0} ###" -#: default.py:78 -msgid "TEXT_TEST_GOOGLE_PAGESPEED" -msgstr "###############################\nKör test: 0 - Google Pagespeed" +#: default.py:93 +msgid "TEST_GOOGLE_LIGHTHOUSE" +msgstr "###############################\nKör test: 0 - Google Lighthouse prestanda" -#: default.py:81 +#: default.py:96 +msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_A11Y" +msgstr "###############################\nKör test: 10 - Google Lighthouse tillgänglighet" + +#: default.py:99 +msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_SEO" +msgstr "###############################\nKör test: 4 - Google Lighthouse sökmotoroptimering" + +#: default.py:102 +msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_PWA" +msgstr "###############################\nKör test: 8 - Google Lighthouse progressiv webbapp" + +#: default.py:105 +msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE" +msgstr "###############################\nKör test: 5 - Google Lighthouse god praxis" + + +#: default.py:107 msgid "TEXT_TEST_PAGE_NOT_FOUND" -msgstr "###############################\nKör test: 2 - 404 (Page not Found)" +msgstr "###############################\nKör test: 2 - 404 (sida finns inte)" -#: default.py:84 +#: default.py:111 msgid "TEXT_TEST_HTML" msgstr "###############################\nKör test: 6 - HTML" -#: default.py:87 +#: default.py:114 msgid "TEXT_TEST_CSS" msgstr "###############################\nKör test: 7 - CSS" -#: default.py:90 +#: default.py:117 msgid "TEXT_TEST_WEBBKOLL" msgstr "###############################\nKör test: 20 - Webbkoll" -#: default.py:96 +#: default.py:120 +msgid "TEXT_TEST_STANDARD_FILES" +msgstr "###############################\nKör test: 9 - Standardfiler" + +#: default.py:127 msgid "TEXT_TEST_VALID_ARGUMENTS" msgstr "Giltiga argument att välja på -t/--test:" -#: default.py:97 -msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_PAGESPEED" -msgstr "-t 0\t: Google Pagespeed" +#: default.py:128 +msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE" +msgstr "-t 0\t: Google Lighthouse prestanda" + +#: default.py:129 +msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_SEO" +msgstr "-t 4\t: Google Lighthouse sökmotoroptimering" + +#: default.py:130 +msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_A11Y" +msgstr "-t 10\t: Google Lighthouse tillgänglighet" -#: default.py:98 +#: default.py:131 +msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_PWA" +msgstr "-t 8\t: Google Lighthouse progressiv webbapp" + +#: default.py:132 +msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_BEST_PRACTICE" +msgstr "-t 5\t: Google Lighthouse god praxis" + +#: default.py:133 msgid "TEXT_TEST_VALID_ARGUMENTS_PAGE_NOT_FOUND" -msgstr "-t 2\t: 404 (Page not Found)" +msgstr "-t 2\t: 404 (sida finns inte)" -#: default.py:99 +#: default.py:134 msgid "TEXT_TEST_VALID_ARGUMENTS_HTML" msgstr "-t 6\t: HTML" -#: default.py:100 +#: default.py:135 msgid "TEXT_TEST_VALID_ARGUMENTS_CSS" msgstr "-t 7\t: CSS" -#: default.py:101 +#: default.py:136 msgid "TEXT_TEST_VALID_ARGUMENTS_WEBBKOLL" msgstr "-t 20\t: Webbkoll" +#: default.py:137 +msgid "TEXT_TEST_VALID_ARGUMENTS_STANDARD_FILES" +msgstr "-t 9\t: Standardfiler" + #: default.py:137 default.py:141 default.py:146 default.py:213 msgid "TEXT_COMMAND_USAGE" -msgstr "\n\tWebPerf Core\n\n\tAnvänd så här:\ndefault.py -u https://webperf.se\n\n\tVal och argument:\n\t-h/--help\t\t\t: Hjälp och hur du använder skriptet\n\t-u/--url \t\t: webbplatsens adress att testa\n\t-t/--test <1/2/6/7/20>\t\t: kör ett enda specifikt test mot angiven webbplats(er)\n\t-r/--review\t\t\t: 
visar omdömen direkt i terminalen\n\t-i/--input \t\t: sökväg för input-fil (.json/.sqlite)\n\t-o/--output \t\t: sökväg till output-fil (.json/.csv/.sql/.sqlite)\n\t-A/--addUrl \t\t: webbplatsens adress/url (ett krav när du använder -i/--input)\n\t-D/--deleteUrl \t: webbplats adress/url (ett krav när du använder -i/--input)\n\t-L/--language \t: språk som används för output(en = default/sv)"
+msgstr "\n\tWebPerf Core\n\n\tAnvänd så här:\ndefault.py -u https://webperf.se\n\n\tVal och argument:\n\t-h/--help\t\t\t: Hjälp och hur du använder skriptet\n\t-u/--url \t\t: webbplatsens adress att testa\n\t-t/--test <1/2/4/5/6/7/8/9/10/20>\t\t: kör ett enda specifikt test mot angiven webbplats(er)\n\t-r/--review\t\t\t: visar omdömen direkt i terminalen\n\t-i/--input \t\t: sökväg för input-fil (.json/.sqlite)\n\t-o/--output \t\t: sökväg till output-fil (.json/.csv/.sql/.sqlite)\n\t-A/--addUrl \t\t: webbplatsens adress/url (ett krav när du använder -i/--input)\n\t-D/--deleteUrl \t: webbplats adress/url (ett krav när du använder -i/--input)\n\t-L/--language \t: språk som används för output(en = default/sv)"

diff --git a/locales/webperf-core.pot b/locales/webperf-core.pot
index 8573e50b..624ac38a 100644
--- a/locales/webperf-core.pot
+++ b/locales/webperf-core.pot
@@ -39,79 +39,118 @@ msgstr ""
 msgid "TEST_REVIEW_NO_REMARKS"
 msgstr ""
 
-#: default.py:21
+#: default.py:24
 msgid "TEXT_TEST_START_HEADER"
 msgstr ""
 
-#: default.py:25
+#: default.py:28
 msgid "TEXT_TESTING_NUMBER_OF_SITES"
 msgstr ""
 
-#: default.py:30
+#: default.py:33
 msgid "TEXT_TESTING_SITE"
 msgstr ""
 
-#: default.py:46
+#: default.py:61
 msgid "TEXT_SITE_RATING"
 msgstr ""
 
-#: default.py:48
+#: default.py:63
 msgid "TEXT_SITE_REVIEW"
 msgstr ""
 
-#: default.py:66
+#: default.py:81
 msgid "TEXT_EXCEPTION"
 msgstr ""
 
-#: default.py:74
+#: default.py:89
 msgid "TEXT_TESTING_START_HEADER"
 msgstr ""
 
-#: default.py:78
-msgid "TEXT_TEST_GOOGLE_PAGESPEED"
+#: default.py:93
+msgid "TEST_GOOGLE_LIGHTHOUSE"
 msgstr ""
 
-#: default.py:81
+#: default.py:96
+msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_A11Y"
+msgstr ""
+
+#: default.py:99
+msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_SEO"
+msgstr ""
+
+#: default.py:102
+msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_PWA"
+msgstr ""
+
+#: default.py:105
+msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE"
+msgstr ""
+
+#: default.py:107
 msgid "TEXT_TEST_PAGE_NOT_FOUND"
 msgstr ""
 
-#: default.py:84
+#: default.py:111
 msgid "TEXT_TEST_HTML"
 msgstr ""
 
-#: default.py:87
+#: default.py:114
 msgid "TEXT_TEST_CSS"
 msgstr ""
 
-#: default.py:90
+#: default.py:117
 msgid "TEXT_TEST_WEBBKOLL"
 msgstr ""
 
-#: default.py:96
+#: default.py:120
+msgid "TEXT_TEST_STANDARD_FILES"
+msgstr ""
+
+#: default.py:127
 msgid "TEXT_TEST_VALID_ARGUMENTS"
 msgstr ""
 
-#: default.py:97
-msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_PAGESPEED"
+#: default.py:128
+msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE"
+msgstr ""
+
+#: default.py:129
+msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_SEO"
+msgstr ""
+
+#: default.py:130
+msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_A11Y"
+msgstr ""
+
+#: default.py:131
+msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_PWA"
 msgstr ""
 
-#: default.py:98
+#: default.py:132
+msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_BEST_PRACTICE"
+msgstr ""
+
+#: default.py:133
 msgid "TEXT_TEST_VALID_ARGUMENTS_PAGE_NOT_FOUND"
 msgstr ""
 
-#: default.py:99
+#: default.py:134
 msgid "TEXT_TEST_VALID_ARGUMENTS_HTML"
 msgstr ""
 
-#: default.py:100
+#: default.py:135
 msgid "TEXT_TEST_VALID_ARGUMENTS_CSS"
 msgstr ""
 
-#: default.py:101
+#: default.py:136
 msgid "TEXT_TEST_VALID_ARGUMENTS_WEBBKOLL"
 msgstr ""
 
-#: default.py:212 default.py:243
-msgid "TEXT_COMMAND_USAGE"
+#: default.py:137
+msgid "TEXT_TEST_VALID_ARGUMENTS_STANDARD_FILES"
 msgstr ""
 
+#: default.py:248 default.py:279
+msgid "TEXT_COMMAND_USAGE"
+msgstr ""

From e013883855a2625fdb63bf49c6c2003f1838b2cd Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Marcus=20=C3=96sterberg?=
Date: Sat, 5 Dec 2020 22:46:56 +0100
Subject: [PATCH 05/49] Update utils.py

---
 tests/utils.py | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/tests/utils.py b/tests/utils.py
index eba08ee0..6e7978e5 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -51,3 +51,16 @@ def convert_to_seconds(millis, return_with_seconds=True):
         return (millis/1000)%60 + " sekunder"
     else:
         return (millis/1000)%60
+
+def is_sitemap(content):
+    """Check a string to see if its content is a sitemap or siteindex.
+
+    Attributes: content (string)
+    """
+    try:
+        if 'www.sitemaps.org/schemas/sitemap/' in content or '<sitemapindex' in content:
+            return True
+    except:
+        return False
+
+    return False
From: =?UTF-8?q?Marcus=20=C3=96sterberg?=
Date: Sat, 5 Dec 2020 22:47:02 +0100
Subject: [PATCH 06/49] Create standard_files.py

---
 tests/standard_files.py | 97 +++++++++++++++++++++++++++++++++++++++++
 1 file changed, 97 insertions(+)
 create mode 100644 tests/standard_files.py

diff --git a/tests/standard_files.py b/tests/standard_files.py
new file mode 100644
index 00000000..4e6dd09f
--- /dev/null
+++ b/tests/standard_files.py
@@ -0,0 +1,97 @@
+#-*- coding: utf-8 -*-
+import sys
+import socket
+import ssl
+import json
+import requests
+import urllib # https://docs.python.org/3/library/urllib.parse.html
+import uuid
+import re
+from bs4 import BeautifulSoup
+import config
+from tests.utils import *
+
+### DEFAULTS
+request_timeout = config.http_request_timeout
+googlePageSpeedApiKey = config.googlePageSpeedApiKey
+
+def run_test(url):
+    """
+    Looking for:
+    * robots.txt
+    * at least one sitemap/siteindex mentioned in robots.txt
+    * a RSS feed mentioned in the page's meta
+    """
+    o = urllib.parse.urlparse(url)
+    parsed_url = '{0}://{1}/'.format(o.scheme, o.netloc)
+    robots_content = httpRequestGetContent(parsed_url + 'robots.txt')
+
+    review = ''
+    return_dict = dict()
+    return_dict["num_sitemaps"] = 0
+    points = 5.0
+
+    if robots_content == None or ('user-agent' not in robots_content.lower() and 'disallow' not in robots_content.lower() and 'allow' not in robots_content.lower()):
+        points -= 3
+        review += '* robots.txt saknas, får inte lov att hämtas eller har inte förväntat innehåll.\n'
+        return_dict['robots.txt'] = 'missing content'
+    else:
+        review += '* robots.txt verkar ok.\n'
+        return_dict['robots.txt'] = 'ok'
+
+    if 'sitemap:' not in robots_content.lower():
+        points -= 2
+        review += '* Sitemap anges inte i robots.txt\n'
+        return_dict['sitemap'] = 'not in robots.txt'
+    else:
+        review += '* Sitemap finns omnämnd i robots.txt\n'
+        return_dict['sitemap'] = 'ok'
+
+        smap_pos = robots_content.lower().find('sitemap')
+        smaps = robots_content[smap_pos:].split('\n')
+        found_smaps = []
+        for line in smaps:
+            if 'sitemap:' in line.lower():
+                found_smaps.append(line.lower().replace('sitemap:', '').strip())
+
+        return_dict["num_sitemaps"] = len(found_smaps)
+
+        if len(found_smaps) > 0:
+            return_dict["sitemaps"] = found_smaps
+            smap_content = httpRequestGetContent(found_smaps[0])
+
+            if not is_sitemap(smap_content):
+                points -= 1
+                review += '* Sitemap verkar vara trasig.\n'
+                return_dict['sitemap_check'] = '\'{0}\' seems to be broken'.format(found_smaps[0])
+            else:
+                review += '* Sitemap verkar fungera.\n'
+                return_dict['sitemap_check'] = '\'{0}\' seems ok'.format(found_smaps[0])
+
+    # TODO: validate first feed
+    headers = {'user-agent': config.useragent}
+    request = requests.get(url, allow_redirects=True, headers=headers, timeout=request_timeout)
+
+    soup = BeautifulSoup(request.text, 'lxml')
+    #feed = soup.find_all(rel='alternate')
+    feed = soup.find_all("link", {"type" : "application/rss+xml"})
+
+    if len(feed) == 0:
+        points -= 0.5
+        review += '* RSS-prenumeration saknas i meta.\n'
+        return_dict['feed'] = 'not in meta'
+        return_dict['num_feeds'] = len(feed)
+    elif len(feed) > 0:
+        review += '* RSS-prenumeration hittad.\n'
+        return_dict['feed'] = 'found in meta'
+        return_dict['num_feeds'] = len(feed)
+        tmp_feed = []
+        for single_feed in feed:
+            tmp_feed.append(single_feed.get('href'))
+
+        return_dict['feeds'] = tmp_feed
+
+    if points < 1:
+        points = 1
+
+    return (points, review, return_dict)

From e70b0b23c3f04fa09790d4be5ed22321da43d518 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Marcus=20=C3=96sterberg?=
Date: Sat, 5 Dec 2020 22:47:52 +0100
Subject: [PATCH 07/49] Changed if-statements to not state 'is'

---
 tests/lighthouse_a11y.py | 6 +++---
 tests/lighthouse_best_practice.py | 6 +++---
 tests/lighthouse_pwa.py | 6 +++---
 tests/lighthouse_seo.py | 6 +++---
 4 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/tests/lighthouse_a11y.py b/tests/lighthouse_a11y.py
index de6cd785..eacb2f12 100644
--- a/tests/lighthouse_a11y.py
+++ b/tests/lighthouse_a11y.py
@@ -51,7 +51,7 @@ def run_test(url, strategy='mobile', category='accessibility'):
 
             score = score + int(json_content['lighthouseResult']['audits'][item]['score'])
 
-            if int(json_content['lighthouseResult']['audits'][item]['score']) is 0:
+            if int(json_content['lighthouseResult']['audits'][item]['score']) == 0:
                 fails += 1
                 fail_dict[item] = json_content['lighthouseResult']['audits'][item]['title']
         except:
@@ -81,11 +81,11 @@ def run_test(url, strategy='mobile', category='accessibility'):
 
     review += '* Antal problem med tillgänglighet: {} st\n'.format(fails)
 
 
-    if fails is not 0:
+    if fails != 0:
         review += '\nTillgänglighets­problem:\n'
 
         for key, value in return_dict.items():
-            if value is 0:
+            if value == 0:
                 review += '* {}\n'.format(fail_dict[key])
                 #print(key)
 
diff --git a/tests/lighthouse_best_practice.py b/tests/lighthouse_best_practice.py
index 101ac320..f3a2111e 100644
--- a/tests/lighthouse_best_practice.py
+++ b/tests/lighthouse_best_practice.py
@@ -51,7 +51,7 @@ def run_test(url, strategy='mobile', category='best-practices'):
 
             score = score + int(json_content['lighthouseResult']['audits'][item]['score'])
 
-            if int(json_content['lighthouseResult']['audits'][item]['score']) is 0:
+            if int(json_content['lighthouseResult']['audits'][item]['score']) == 0:
                 fails += 1
                 fail_dict[item] = json_content['lighthouseResult']['audits'][item]['title']
         except:
@@ -81,11 +81,11 @@ def run_test(url, strategy='mobile', category='best-practices'):
 
     review += '* Antal problem med god praxis: {} st\n'.format(fails)
 
 
-    if fails is not 0:
+    if fails != 0:
         review += '\nProblem:\n'
 
         for key, value in return_dict.items():
-            if value is 0:
+            if value == 0:
                 review += '* {}\n'.format(fail_dict[key])
                 #print(key)
 
diff --git a/tests/lighthouse_pwa.py b/tests/lighthouse_pwa.py
index 6ab9281f..1a63aa30 100644
--- a/tests/lighthouse_pwa.py
+++ b/tests/lighthouse_pwa.py
@@ -51,7 +51,7 @@ def run_test(url, strategy='mobile', category='pwa'):
 
             score = score + int(json_content['lighthouseResult']['audits'][item]['score'])
 
-            if int(json_content['lighthouseResult']['audits'][item]['score']) is 0:
+            if 
int(json_content['lighthouseResult']['audits'][item]['score']) == 0: fails += 1 fail_dict[item] = json_content['lighthouseResult']['audits'][item]['title'] except: @@ -81,11 +81,11 @@ def run_test(url, strategy='mobile', category='pwa'): review += '* Antal problem med praxis för progressiva webbappar: {} st\n'.format(fails) - if fails is not 0: + if fails != 0: review += '\nProblem:\n' for key, value in return_dict.items(): - if value is 0: + if value == 0: review += '* {}\n'.format(fail_dict[key]) #print(key) diff --git a/tests/lighthouse_seo.py b/tests/lighthouse_seo.py index f55be944..607bb88c 100644 --- a/tests/lighthouse_seo.py +++ b/tests/lighthouse_seo.py @@ -51,7 +51,7 @@ def run_test(url, strategy='mobile', category='seo'): score = score + int(json_content['lighthouseResult']['audits'][item]['score']) - if int(json_content['lighthouseResult']['audits'][item]['score']) is 0: + if int(json_content['lighthouseResult']['audits'][item]['score']) == 0: fails += 1 fail_dict[item] = json_content['lighthouseResult']['audits'][item]['title'] except: @@ -81,11 +81,11 @@ def run_test(url, strategy='mobile', category='seo'): review += '* Antal problem med god praxis: {} st\n'.format(fails) - if fails is not 0: + if fails != 0: review += '\nProblem:\n' for key, value in return_dict.items(): - if value is not None and value < 1: + if value != None and value < 1: review += '* {}\n'.format(fail_dict[key]) #print(key) From effa153b0e278608df56c56f6d2915239d7f904a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marcus=20=C3=96sterberg?= Date: Sat, 5 Dec 2020 22:48:18 +0100 Subject: [PATCH 08/49] Update default.py A couple of new tests have been added :) --- default.py | 43 +++++++++++++++++++++++++++++++++++++------ 1 file changed, 37 insertions(+), 6 deletions(-) diff --git a/default.py b/default.py index 88e01eca..d222fc61 100644 --- a/default.py +++ b/default.py @@ -8,7 +8,7 @@ TEST_ALL = -1 -(TEST_UNKNOWN_01, TEST_GOOGLE_LIGHTHOUSE, TEST_PAGE_NOT_FOUND, TEST_UNKNOWN_03, TEST_UNKNOWN_04, TEST_UNKNOWN_05, TEST_HTML, TEST_CSS, TEST_UNKNOWN_08, TEST_UNKNOWN_09, TEST_UNKNOWN_10, TEST_UNKNOWN_11, TEST_UNKNOWN_12, TEST_UNKNOWN_13, TEST_UNKNOWN_14, TEST_UNKNOWN_15, TEST_UNKNOWN_16, TEST_UNKNOWN_17, TEST_UNKNOWN_18, TEST_UNKNOWN_19, TEST_WEBBKOLL) = range(21) +(TEST_UNKNOWN_01, TEST_GOOGLE_LIGHTHOUSE, TEST_PAGE_NOT_FOUND, TEST_UNKNOWN_03, TEST_GOOGLE_LIGHTHOUSE_SEO, TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE, TEST_HTML, TEST_CSS, TEST_GOOGLE_LIGHTHOUSE_PWA, TEST_STANDARD_FILES, TEST_GOOGLE_LIGHTHOUSE_A11Y, TEST_UNKNOWN_11, TEST_UNKNOWN_12, TEST_UNKNOWN_13, TEST_UNKNOWN_14, TEST_UNKNOWN_15, TEST_UNKNOWN_16, TEST_UNKNOWN_17, TEST_UNKNOWN_18, TEST_UNKNOWN_19, TEST_WEBBKOLL) = range(21) def testsites(sites, test_type=None, show_reviews=False, only_test_untested_last_hours=24, order_by='title ASC'): """ @@ -41,9 +41,19 @@ def testsites(sites, test_type=None, show_reviews=False, only_test_untested_last elif test_type == 7: from tests.w3c_validate_css import run_test elif test_type == 20: - from tests.privacy_webbhollen import run_test + from tests.privacy_webbkollen import run_test elif test_type == 1: from tests.lighthouse import run_test + elif test_type == 4: + from tests.lighthouse_seo import run_test + elif test_type == 5: + from tests.lighthouse_best_practice import run_test + elif test_type == 8: + from tests.lighthouse_pwa import run_test + elif test_type == 9: + from tests.standard_files import run_test + elif test_type == 10: + from tests.lighthouse_a11y import run_test the_test_result = run_test(website) @@ 
-80,8 +90,20 @@ def testing(sites, test_type= TEST_ALL, show_reviews= False): tests = list() ############## if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE): - print(_('TEXT_TEST_GOOGLE_PAGESPEED')) + print(_('TEST_GOOGLE_LIGHTHOUSE')) tests.extend(testsites(sites, test_type=TEST_GOOGLE_LIGHTHOUSE, show_reviews=show_reviews)) + if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_A11Y): + print(_('TEST_GOOGLE_LIGHTHOUSE_A11Y')) + tests.extend(testsites(sites, test_type=TEST_GOOGLE_LIGHTHOUSE_A11Y, show_reviews=show_reviews)) + if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_SEO): + print(_('TEST_GOOGLE_LIGHTHOUSE_SEO')) + tests.extend(testsites(sites, test_type=TEST_GOOGLE_LIGHTHOUSE_SEO, show_reviews=show_reviews)) + if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_PWA): + print(_('TEST_GOOGLE_LIGHTHOUSE_PWA')) + tests.extend(testsites(sites, test_type=TEST_GOOGLE_LIGHTHOUSE_PWA, show_reviews=show_reviews)) + if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE): + print(_('TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE')) + tests.extend(testsites(sites, test_type=TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_PAGE_NOT_FOUND): print(_('TEXT_TEST_PAGE_NOT_FOUND')) tests.extend(testsites(sites, test_type=TEST_PAGE_NOT_FOUND, show_reviews=show_reviews)) @@ -94,16 +116,25 @@ def testing(sites, test_type= TEST_ALL, show_reviews= False): if (test_type == TEST_ALL or test_type == TEST_WEBBKOLL): print(_('TEXT_TEST_WEBBKOLL')) tests.extend(testsites(sites, test_type=TEST_WEBBKOLL, show_reviews=show_reviews)) + if (test_type == TEST_ALL or test_type == TEST_STANDARD_FILES): + print(_('TEXT_TEST_STANDARD_FILES')) + tests.extend(testsites(sites, test_type=TEST_STANDARD_FILES, show_reviews=show_reviews)) + return tests def validate_test_type(test_type): - if test_type != TEST_HTML and test_type != TEST_PAGE_NOT_FOUND and test_type != TEST_CSS and test_type != TEST_WEBBKOLL and test_type != TEST_GOOGLE_LIGHTHOUSE: + if test_type != TEST_HTML and test_type != TEST_PAGE_NOT_FOUND and test_type != TEST_CSS and test_type != TEST_WEBBKOLL and test_type != TEST_GOOGLE_LIGHTHOUSE and test_type != TEST_GOOGLE_LIGHTHOUSE_PWA and test_type != TEST_GOOGLE_LIGHTHOUSE_A11Y and test_type != TEST_GOOGLE_LIGHTHOUSE_SEO and test_type != TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE and test_type != TEST_STANDARD_FILES: print(_('TEXT_TEST_VALID_ARGUMENTS')) - print(_('TEXT_TEST_VALID_ARGUMENTS_GOOGLE_PAGESPEED')) + print(_('TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE')) + print(_('TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_SEO')) + print(_('TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_A11Y')) + print(_('TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_PWA')) + print(_('TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_BEST_PRACTICE')) print(_('TEXT_TEST_VALID_ARGUMENTS_PAGE_NOT_FOUND')) print(_('TEXT_TEST_VALID_ARGUMENTS_HTML')) print(_('TEXT_TEST_VALID_ARGUMENTS_CSS')) print(_('TEXT_TEST_VALID_ARGUMENTS_WEBBKOLL')) + print(_('TEXT_TEST_VALID_ARGUMENTS_STANDARD_FILES')) return -2 else: return test_type @@ -118,7 +149,7 @@ def main(argv): Options and arguments: -h/--help\t\t\t: Help information on how to use script -u/--url \t\t: website url to test against - -t/--test <1/2/6/7/20>\t: runs ONE specific test against website(s) + -t/--test <1/2/4/5/6/7/8/9/10/20>\t: runs ONE specific test against website(s) -r/--review\t\t\t: show reviews in terminal -i/--input \t: input file path (.json/.sqlite) 
-o/--output \t: output file path

From 83ce50e5783f0fde91e2f515edad596ce613f9ae Mon Sep 17 00:00:00 2001
From: Mattias
Date: Sun, 6 Dec 2020 21:21:04 +0100
Subject: [PATCH 09/49] updated mo files for translations (we seem to still be
 missing some strings in both languages)

---
 locales/en/LC_MESSAGES/webperf-core.mo | Bin 3009 -> 4149 bytes
 locales/sv/LC_MESSAGES/webperf-core.mo | Bin 3052 -> 4228 bytes
 2 files changed, 0 insertions(+), 0 deletions(-)

diff --git a/locales/en/LC_MESSAGES/webperf-core.mo b/locales/en/LC_MESSAGES/webperf-core.mo
index 04e822fc9eea11a8febfda27e6e5d3f5d8c06be5..edc5a59e9a93fb279395b993b8e4a99346bea877 100644
Binary files a/locales/en/LC_MESSAGES/webperf-core.mo and b/locales/en/LC_MESSAGES/webperf-core.mo differ
diff --git a/locales/sv/LC_MESSAGES/webperf-core.mo b/locales/sv/LC_MESSAGES/webperf-core.mo
index 02c37e7eb95df114697be88a3d20e8efe2c05e55..399089776d7071317c00579992e90dd6ad92491f 100644
Binary files a/locales/sv/LC_MESSAGES/webperf-core.mo and b/locales/sv/LC_MESSAGES/webperf-core.mo differ
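On the "still missing some strings" remark above: one rough way to spot keys
that fall back to the raw msgid is to query the compiled catalogs directly
(a sketch, assuming the .mo files sit under locales/ as in this patch; key
names taken from the .po diffs above):

    # A gettext lookup that comes back unchanged was not found in the
    # compiled catalog (fallback=True avoids raising if a .mo is absent).
    import gettext

    for lang in ('en', 'sv'):
        t = gettext.translation('webperf-core', localedir='locales',
                                languages=[lang], fallback=True)
        for key in ('TEST_GOOGLE_LIGHTHOUSE', 'TEXT_TEST_STANDARD_FILES',
                    'TEXT_COMMAND_USAGE'):
            if t.gettext(key) == key:
                print(lang, 'is missing', key)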
From: Mattias
Date: Sun, 6 Dec 2020 21:54:24 +0100
Subject: [PATCH 10/49] adjusted translation keys so they match and are
 consistent

---
 default.py | 10 +++++-----
 locales/en/LC_MESSAGES/webperf-core.mo | Bin 4149 -> 4184 bytes
 locales/en/LC_MESSAGES/webperf-core.po | 14 +++++++-------
 locales/sv/LC_MESSAGES/webperf-core.mo | Bin 4228 -> 4263 bytes
 locales/sv/LC_MESSAGES/webperf-core.po | 14 +++++++-------
 locales/webperf-core.pot | 12 ++++++------
 tests/page_not_found.py | 12 ++++++------
 7 files changed, 31 insertions(+), 31 deletions(-)

diff --git a/default.py b/default.py
index d222fc61..611e675e 100644
--- a/default.py
+++ b/default.py
@@ -90,19 +90,19 @@ def testing(sites, test_type= TEST_ALL, show_reviews= False):
     tests = list()
     ##############
     if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE):
-        print(_('TEST_GOOGLE_LIGHTHOUSE'))
+        print(_('TEXT_TEST_GOOGLE_LIGHTHOUSE'))
         tests.extend(testsites(sites, test_type=TEST_GOOGLE_LIGHTHOUSE, show_reviews=show_reviews))
     if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_A11Y):
-        print(_('TEST_GOOGLE_LIGHTHOUSE_A11Y'))
+        print(_('TEXT_TEST_GOOGLE_LIGHTHOUSE_A11Y'))
         tests.extend(testsites(sites, test_type=TEST_GOOGLE_LIGHTHOUSE_A11Y, show_reviews=show_reviews))
     if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_SEO):
-        print(_('TEST_GOOGLE_LIGHTHOUSE_SEO'))
+        print(_('TEXT_TEST_GOOGLE_LIGHTHOUSE_SEO'))
         tests.extend(testsites(sites, test_type=TEST_GOOGLE_LIGHTHOUSE_SEO, show_reviews=show_reviews))
     if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_PWA):
-        print(_('TEST_GOOGLE_LIGHTHOUSE_PWA'))
+        print(_('TEXT_TEST_GOOGLE_LIGHTHOUSE_PWA'))
         tests.extend(testsites(sites, test_type=TEST_GOOGLE_LIGHTHOUSE_PWA, show_reviews=show_reviews))
     if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE):
-        print(_('TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE'))
+        print(_('TEXT_TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE'))
         tests.extend(testsites(sites, test_type=TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE, show_reviews=show_reviews))
     if (test_type == TEST_ALL or test_type == TEST_PAGE_NOT_FOUND):
         print(_('TEXT_TEST_PAGE_NOT_FOUND'))
diff --git a/locales/en/LC_MESSAGES/webperf-core.mo b/locales/en/LC_MESSAGES/webperf-core.mo
index edc5a59e9a93fb279395b993b8e4a99346bea877..06144fd3c2a383369d76e31efb5596958bbf5283 100644
Binary files a/locales/en/LC_MESSAGES/webperf-core.mo and b/locales/en/LC_MESSAGES/webperf-core.mo differ
diff --git a/locales/en/LC_MESSAGES/webperf-core.po b/locales/en/LC_MESSAGES/webperf-core.po
index fb0de5f5..5370fca4 100644
--- a/locales/en/LC_MESSAGES/webperf-core.po
+++ b/locales/en/LC_MESSAGES/webperf-core.po
@@ -16,27 +16,27 @@ msgstr ""
 
 #: tests/page_not_found.py:35
-msgid "TEST_404_REVIEW_WRONG_STATUS_CODE"
+msgid "TEXT_TEST_404_REVIEW_WRONG_STATUS_CODE"
 msgstr "* Wrong status code. 
Got {0} when 404 would be correct.\n"
 
 #: tests/page_not_found.py:55
-msgid "TEST_404_REVIEW_NO_TITLE"
+msgid "TEXT_TEST_404_REVIEW_NO_TITLE"
 msgstr "* Found no page title in the page metadata.\n"
 
 #: tests/page_not_found.py:65
-msgid "TEST_404_REVIEW_MAIN_HEADER"
+msgid "TEXT_TEST_404_REVIEW_MAIN_HEADER"
 msgstr "* Found no headline (h1)\n"
 
 #: tests/page_not_found.py:124
-msgid "TEST_404_REVIEW_NO_SWEDISH_ERROR_MSG"
+msgid "TEXT_TEST_404_REVIEW_NO_SWEDISH_ERROR_MSG"
 msgstr "* Seems to lack text describing that an error has occurred (in Swedish).\n"
 
 #: tests/page_not_found.py:131
-msgid "TEST_404_REVIEW_ERROR_MSG_UNDER_150"
+msgid "TEXT_TEST_404_REVIEW_ERROR_MSG_UNDER_150"
 msgstr "* Text content length is below 150 characters, indicating that the user is not being referred.\n"
 
 #: tests/page_not_found.py:134
-msgid "TEST_REVIEW_NO_REMARKS"
+msgid "TEXT_TEST_REVIEW_NO_REMARKS"
 msgstr "* No remarks"
 
 #: default.py:24
@@ -68,7 +68,7 @@ msgid "TEXT_TESTING_START_HEADER"
 msgstr "### {0} ###"
 
 #: default.py:93
-msgid "TEST_GOOGLE_LIGHTHOUSE"
+msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE"
 msgstr "###############################\nRunning test: 0 - Google Lighthouse Performance"
 
 #: default.py:96
diff --git a/locales/sv/LC_MESSAGES/webperf-core.mo b/locales/sv/LC_MESSAGES/webperf-core.mo
index 399089776d7071317c00579992e90dd6ad92491f..6a9f4de2133162c41e4d51f34531dbe9d2bd5e6a 100644
Binary files a/locales/sv/LC_MESSAGES/webperf-core.mo and b/locales/sv/LC_MESSAGES/webperf-core.mo differ
diff --git a/locales/sv/LC_MESSAGES/webperf-core.po b/locales/sv/LC_MESSAGES/webperf-core.po
index 50af6efb..b2a28b63 100644
--- a/locales/sv/LC_MESSAGES/webperf-core.po
+++ b/locales/sv/LC_MESSAGES/webperf-core.po
@@ -16,27 +16,27 @@ msgstr ""
 
 #: tests/page_not_found.py:35
-msgid "TEST_404_REVIEW_WRONG_STATUS_CODE"
+msgid "TEXT_TEST_404_REVIEW_WRONG_STATUS_CODE"
 msgstr "* Fel statuskod. Fick {0} när 404 vore korrekt.\n"
 
 #: tests/page_not_found.py:55
-msgid "TEST_404_REVIEW_NO_TITLE"
+msgid "TEXT_TEST_404_REVIEW_NO_TITLE"
 msgstr "* Hittade ingen titel på sidan\n"
 
 #: tests/page_not_found.py:65
-msgid "TEST_404_REVIEW_MAIN_HEADER"
+msgid "TEXT_TEST_404_REVIEW_MAIN_HEADER"
 msgstr "* Hittade ingen huvudrubrik (h1)\n"
 
 #: tests/page_not_found.py:124
-msgid "TEST_404_REVIEW_NO_SWEDISH_ERROR_MSG"
+msgid "TEXT_TEST_404_REVIEW_NO_SWEDISH_ERROR_MSG"
 msgstr "* Verkar sakna text som beskriver att ett fel uppstått (på svenska).\n"
 
 #: tests/page_not_found.py:131
-msgid "TEST_404_REVIEW_ERROR_MSG_UNDER_150"
+msgid "TEXT_TEST_404_REVIEW_ERROR_MSG_UNDER_150"
 msgstr "* Information är under 150 tecken, vilket tyder på att användaren inte vägleds vidare.\n"
 
 #: tests/page_not_found.py:134
-msgid "TEST_REVIEW_NO_REMARKS"
+msgid "TEXT_TEST_REVIEW_NO_REMARKS"
 msgstr "* Inga anmärkningar."
#: default.py:24 @@ -68,7 +68,7 @@ msgid "TEXT_TESTING_START_HEADER" msgstr "### {0} ###" #: default.py:93 -msgid "TEST_GOOGLE_LIGHTHOUSE" +msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE" msgstr "###############################\nKör test: 0 - Google Lighthouse prestanda" #: default.py:96 diff --git a/locales/webperf-core.pot b/locales/webperf-core.pot index 624ac38a..0c3fc9b5 100644 --- a/locales/webperf-core.pot +++ b/locales/webperf-core.pot @@ -16,27 +16,27 @@ msgstr "" #: tests/page_not_found.py:35 -msgid "TEST_404_REVIEW_WRONG_STATUS_CODE" +msgid "TEXT_TEST_404_REVIEW_WRONG_STATUS_CODE" msgstr "" #: tests/page_not_found.py:55 -msgid "TEST_404_REVIEW_NO_TITLE" +msgid "TEXT_TEST_404_REVIEW_NO_TITLE" msgstr "" #: tests/page_not_found.py:65 -msgid "TEST_404_REVIEW_MAIN_HEADER" +msgid "TEXT_TEST_404_REVIEW_MAIN_HEADER" msgstr "" #: tests/page_not_found.py:124 -msgid "TEST_404_REVIEW_NO_SWEDISH_ERROR_MSG" +msgid "TEXT_TEST_404_REVIEW_NO_SWEDISH_ERROR_MSG" msgstr "" #: tests/page_not_found.py:131 -msgid "TEST_404_REVIEW_ERROR_MSG_UNDER_150" +msgid "TEXT_TEST_404_REVIEW_ERROR_MSG_UNDER_150" msgstr "" #: tests/page_not_found.py:134 -msgid "TEST_REVIEW_NO_REMARKS" +msgid "TEXT_TEST_REVIEW_NO_REMARKS" msgstr "" #: default.py:24 diff --git a/tests/page_not_found.py b/tests/page_not_found.py index a3d598ee..d3ded154 100644 --- a/tests/page_not_found.py +++ b/tests/page_not_found.py @@ -32,7 +32,7 @@ def run_test(url): if code == 404: points += 2.0 else: - review = review + _('TEST_404_REVIEW_WRONG_STATUS_CODE').format(request.status_code) #'* Fel statuskod. Fick {0} när 404 vore korrekt.\n'.format(request.status_code) + review = review + _('TEXT_TEST_404_REVIEW_WRONG_STATUS_CODE').format(request.status_code) #'* Fel statuskod. Fick {0} när 404 vore korrekt.\n'.format(request.status_code) result_dict['status_code'] = code @@ -52,7 +52,7 @@ def run_test(url): if title: result_dict['page_title'] = title.string else: - review = review + _('TEST_404_REVIEW_NO_TITLE') #'* hittade ingen titel på sidan\n' + review = review + _('TEXT_TEST_404_REVIEW_NO_TITLE') #'* hittade ingen titel på sidan\n' except: print('Error getting page title!\nMessage:\n{0}'.format(sys.exc_info()[0])) @@ -62,7 +62,7 @@ def run_test(url): if h1: result_dict['h1'] = h1.string else: - review = review + _('TEST_404_REVIEW_MAIN_HEADER') #'* hittade ingen huvud rubrik (h1)\n' + review = review + _('TEXT_TEST_404_REVIEW_MAIN_HEADER') #'* hittade ingen huvud rubrik (h1)\n' except: print('Error getting H1!\nMessage:\n{0}'.format(sys.exc_info()[0])) @@ -121,17 +121,17 @@ def run_test(url): if found_match == False: - review = review + _('TEST_404_REVIEW_NO_SWEDISH_ERROR_MSG') #'* Verkar sakna text som beskriver att ett fel uppstått (på svenska).\n' + review = review + _('TEXT_TEST_404_REVIEW_NO_SWEDISH_ERROR_MSG') #'* Verkar sakna text som beskriver att ett fel uppstått (på svenska).\n' ## hur långt är inehållet soup = BeautifulSoup(request.text, 'html.parser') if len(soup.get_text()) > 150: points += 1.5 else: - review = review + _('TEST_404_REVIEW_ERROR_MSG_UNDER_150') #'* Information är under 150 tecken, vilket tyder på att användaren inte vägleds vidare.\n' + review = review + _('TEXT_TEST_404_REVIEW_ERROR_MSG_UNDER_150') #'* Information är under 150 tecken, vilket tyder på att användaren inte vägleds vidare.\n' if len(review) == 0: - review = _('TEST_REVIEW_NO_REMARKS') + review = _('TEXT_TEST_REVIEW_NO_REMARKS') if points == 0: points = 1.0 From 8afd485d0a4e13e77566e628b19f598d92af47db Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Marcus=20=C3=96sterberg?=
Date: Wed, 9 Dec 2020 21:04:26 +0100
Subject: [PATCH 11/49] Create yellow_lab_tools.py

---
 tests/yellow_lab_tools.py | 75 +++++++++++++++++++++++++++++++++++++++
 1 file changed, 75 insertions(+)
 create mode 100644 tests/yellow_lab_tools.py

diff --git a/tests/yellow_lab_tools.py b/tests/yellow_lab_tools.py
new file mode 100644
index 00000000..9a5cafa6
--- /dev/null
+++ b/tests/yellow_lab_tools.py
@@ -0,0 +1,75 @@
+#-*- coding: utf-8 -*-
+import sys
+import socket
+import ssl
+import json
+import requests
+import urllib # https://docs.python.org/3/library/urllib.parse.html
+import uuid
+import re
+from bs4 import BeautifulSoup
+import config
+from tests.utils import *
+
+### DEFAULTS
+request_timeout = config.http_request_timeout
+googlePageSpeedApiKey = config.googlePageSpeedApiKey
+
+def run_test(url, device='phone'):
+    """
+    Analyzes URL with Yellow Lab Tools docker image.
+    Devices might be; phone, tablet, desktop
+    """
+    r = requests.post('https://yellowlab.tools/api/runs', data = {'url':url, "waitForResponse":'true', 'device': device})
+
+    result_url = r.url
+    test_id = result_url.rsplit('/', 1)[1]
+
+    result_json = httpRequestGetContent('https://yellowlab.tools/api/results/{0}?exclude=toolsResults'.format(test_id))
+    result_dict = json.loads(result_json)
+
+    return_dict = {}
+
+    for key in result_dict['scoreProfiles']['generic'].keys():
+        if key == 'globalScore':
+            return_dict[key] = result_dict['scoreProfiles']['generic'][key]
+
+    for key in result_dict['scoreProfiles']['generic']['categories'].keys():
+        return_dict[key] = result_dict['scoreProfiles']['generic']['categories'][key]['categoryScore']
+
+    review = ''
+    yellow_lab = return_dict["globalScore"]
+
+    rating = (int(yellow_lab) / 20) + 0.5
+
+    if rating > 5:
+        rating = 5
+    elif rating < 1:
+        rating = 1
+
+    if rating == 5:
+        review = '* Webbplatsen är välbyggd!\n'
+    elif rating >= 4:
+        review = '* Webbplatsen är bra.\n'
+    elif rating >= 3:
+        review = '* Helt ok.\n'
+    elif rating >= 2:
+        review = '* Webbplatsen är rätt långsam eller har dålig frontend-kod.\n'
+    elif rating <= 1:
+        review = '* Väldigt dåligt betyg enligt Yellow Lab Tools!\n'
+
+    review += '* Övergripande betyg: {} av 100\n'.format(return_dict["globalScore"])
+    review += '* Testat för devicetyp: {}\n'.format(device)
+    review += '* pageWeight: {}\n'.format(return_dict["pageWeight"])
+    review += '* requests: {}\n'.format(return_dict["requests"])
+    review += '* domComplexity: {}\n'.format(return_dict["domComplexity"])
+    review += '* domManipulations: {}\n'.format(return_dict["domManipulations"])
+    review += '* scroll: {}\n'.format(return_dict["scroll"])
+    review += '* badJavascript: {}\n'.format(return_dict["badJavascript"])
+    review += '* jQuery: {}\n'.format(return_dict["jQuery"])
+    review += '* cssComplexity: {}\n'.format(return_dict["cssComplexity"])
+    review += '* badCSS: {}\n'.format(return_dict["badCSS"])
+    review += '* fonts: {}\n'.format(return_dict["fonts"])
+    review += '* serverConfig: {}\n'.format(return_dict["serverConfig"])
+
+    return (rating, review, return_dict)
\ No newline at end of file

From b05c5d6fb48265c9b443cf3a369e670c00af3e13 Mon Sep 17 00:00:00 2001
From: Mattias
Date: Wed, 9 Dec 2020 21:51:31 +0100
Subject: [PATCH 12/49] translation + new test work

- changed so -t will show help text
- argument help is now translatable
- fixed Swedish translation (wrong test number for one test)
- added Yellow Lab Tools test
- changed so we use our named constants instead of magic numbers in code (see the sketch below)
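The sketch referenced in the last item above: default.py stops comparing test_type against bare numbers and instead unpacks range(21) into a tuple of named constants, so each name's position in the tuple doubles as its numeric -t value. This is cut down to three names for illustration; the real tuple in default.py names all 21 slots.

    # Tuple unpacking over range() numbers each constant by its position,
    # so TEST_PAGE_NOT_FOUND == 2 lines up with the '-t 2' command-line value.
    (TEST_UNKNOWN_01, TEST_GOOGLE_LIGHTHOUSE, TEST_PAGE_NOT_FOUND) = range(3)
    assert TEST_PAGE_NOT_FOUND == 2

---
 default.py | 46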
++++++++++++++++--------- locales/en/LC_MESSAGES/webperf-core.mo | Bin 4184 -> 4420 bytes locales/en/LC_MESSAGES/webperf-core.po | 8 ++++- locales/sv/LC_MESSAGES/webperf-core.mo | Bin 4263 -> 4503 bytes locales/sv/LC_MESSAGES/webperf-core.po | 8 ++++- 5 files changed, 43 insertions(+), 19 deletions(-) diff --git a/default.py b/default.py index 611e675e..eb2be739 100644 --- a/default.py +++ b/default.py @@ -8,7 +8,7 @@ TEST_ALL = -1 -(TEST_UNKNOWN_01, TEST_GOOGLE_LIGHTHOUSE, TEST_PAGE_NOT_FOUND, TEST_UNKNOWN_03, TEST_GOOGLE_LIGHTHOUSE_SEO, TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE, TEST_HTML, TEST_CSS, TEST_GOOGLE_LIGHTHOUSE_PWA, TEST_STANDARD_FILES, TEST_GOOGLE_LIGHTHOUSE_A11Y, TEST_UNKNOWN_11, TEST_UNKNOWN_12, TEST_UNKNOWN_13, TEST_UNKNOWN_14, TEST_UNKNOWN_15, TEST_UNKNOWN_16, TEST_UNKNOWN_17, TEST_UNKNOWN_18, TEST_UNKNOWN_19, TEST_WEBBKOLL) = range(21) +(TEST_UNKNOWN_01, TEST_GOOGLE_LIGHTHOUSE, TEST_PAGE_NOT_FOUND, TEST_UNKNOWN_03, TEST_GOOGLE_LIGHTHOUSE_SEO, TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE, TEST_HTML, TEST_CSS, TEST_GOOGLE_LIGHTHOUSE_PWA, TEST_STANDARD_FILES, TEST_GOOGLE_LIGHTHOUSE_A11Y, TEST_UNKNOWN_11, TEST_UNKNOWN_12, TEST_UNKNOWN_13, TEST_UNKNOWN_14, TEST_UNKNOWN_15, TEST_UNKNOWN_16, TEST_YELLOW_LAB_TOOLS, TEST_UNKNOWN_18, TEST_UNKNOWN_19, TEST_WEBBKOLL) = range(21) def testsites(sites, test_type=None, show_reviews=False, only_test_untested_last_hours=24, order_by='title ASC'): """ @@ -34,26 +34,28 @@ def testsites(sites, test_type=None, show_reviews=False, only_test_untested_last the_test_result = None try: - if test_type == 2: + if test_type == TEST_PAGE_NOT_FOUND: from tests.page_not_found import run_test - elif test_type == 6: + elif test_type == TEST_HTML: from tests.w3c_validate_html import run_test - elif test_type == 7: + elif test_type == TEST_CSS: from tests.w3c_validate_css import run_test - elif test_type == 20: + elif test_type == TEST_WEBBKOLL: from tests.privacy_webbkollen import run_test - elif test_type == 1: + elif test_type == TEST_GOOGLE_LIGHTHOUSE: from tests.lighthouse import run_test - elif test_type == 4: + elif test_type == TEST_GOOGLE_LIGHTHOUSE_SEO: from tests.lighthouse_seo import run_test - elif test_type == 5: + elif test_type == TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE: from tests.lighthouse_best_practice import run_test - elif test_type == 8: + elif test_type == TEST_GOOGLE_LIGHTHOUSE_PWA: from tests.lighthouse_pwa import run_test - elif test_type == 9: + elif test_type == TEST_STANDARD_FILES: from tests.standard_files import run_test - elif test_type == 10: + elif test_type == TEST_GOOGLE_LIGHTHOUSE_A11Y: from tests.lighthouse_a11y import run_test + elif test_type == TEST_YELLOW_LAB_TOOLS: + from tests.yellow_lab_tools import run_test the_test_result = run_test(website) @@ -119,11 +121,14 @@ def testing(sites, test_type= TEST_ALL, show_reviews= False): if (test_type == TEST_ALL or test_type == TEST_STANDARD_FILES): print(_('TEXT_TEST_STANDARD_FILES')) tests.extend(testsites(sites, test_type=TEST_STANDARD_FILES, show_reviews=show_reviews)) + if (test_type == TEST_ALL or test_type == TEST_YELLOW_LAB_TOOLS): + print(_('TEXT_TEST_YELLOW_LAB_TOOLS')) + tests.extend(testsites(sites, test_type=TEST_YELLOW_LAB_TOOLS, show_reviews=show_reviews)) return tests def validate_test_type(test_type): - if test_type != TEST_HTML and test_type != TEST_PAGE_NOT_FOUND and test_type != TEST_CSS and test_type != TEST_WEBBKOLL and test_type != TEST_GOOGLE_LIGHTHOUSE and test_type != TEST_GOOGLE_LIGHTHOUSE_PWA and test_type != TEST_GOOGLE_LIGHTHOUSE_A11Y and test_type != 
TEST_GOOGLE_LIGHTHOUSE_SEO and test_type != TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE and test_type != TEST_STANDARD_FILES: + if test_type != TEST_HTML and test_type != TEST_PAGE_NOT_FOUND and test_type != TEST_CSS and test_type != TEST_WEBBKOLL and test_type != TEST_GOOGLE_LIGHTHOUSE and test_type != TEST_GOOGLE_LIGHTHOUSE_PWA and test_type != TEST_GOOGLE_LIGHTHOUSE_A11Y and test_type != TEST_GOOGLE_LIGHTHOUSE_SEO and test_type != TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE and test_type != TEST_STANDARD_FILES and test_type != TEST_YELLOW_LAB_TOOLS: print(_('TEXT_TEST_VALID_ARGUMENTS')) print(_('TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE')) print(_('TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_SEO')) @@ -135,6 +140,7 @@ def validate_test_type(test_type): print(_('TEXT_TEST_VALID_ARGUMENTS_CSS')) print(_('TEXT_TEST_VALID_ARGUMENTS_WEBBKOLL')) print(_('TEXT_TEST_VALID_ARGUMENTS_STANDARD_FILES')) + print(_('TEXT_TEST_VALID_ARGUMENTS_YELLOW_LAB_TOOLS')) return -2 else: return test_type @@ -167,8 +173,15 @@ def main(argv): add_url = '' delete_url = '' langCode = 'en' + language = False global _ + # add support for default (en) language + language = gettext.translation('webperf-core', localedir='locales', languages=[langCode]) + language.install() + _ = language.gettext + + try: opts, args = getopt.getopt(argv,"hu:t:i:o:rA:D:L:",["help","url","test", "input", "output", "review", "report", "addUrl", "deleteUrl", "language"]) except getopt.GetoptError: @@ -207,6 +220,10 @@ def main(argv): langCode = arg foundLang = True + language = gettext.translation('webperf-core', localedir='locales', languages=[langCode]) + language.install() + _ = language.gettext + if (not foundLang): # Not translateable print('Language not found, only the following languages are available:', availableLanguages) @@ -239,11 +256,6 @@ def main(argv): show_reviews = True pass - # add support for language - language = gettext.translation('webperf-core', localedir='locales', languages=[langCode]) - language.install() - _ = language.gettext - if (show_help): print(_('TEXT_COMMAND_USAGE')) sys.exit(2) diff --git a/locales/en/LC_MESSAGES/webperf-core.mo b/locales/en/LC_MESSAGES/webperf-core.mo index 06144fd3c2a383369d76e31efb5596958bbf5283..81b5c033fb3603b3b2597e3da1b81212364492b2 100644 GIT binary patch delta 842 zcma*lPe>F|9Ki9Pwr;Dn>m(#w_UEOWsihWDTN_v{Oi>O_=8!8MhBY@4mmS)j{!%Rk z(M3oYx)tTMONG#-E@6kRB}H`b(ydzX+O6-yfud8x%zS>oAM@tD_nY~m-nCa(S|SgH zqmyeJmsO{;KgZ2_krvWhn80~7@JZ+k)Z^abCS1WSxQboav`$3f0CuB;+wnFsXH^~% z#3?Lc8-75AHPizV5s{sEI5dZPA=fa8_fd~~8s=}XmHZRxh5ST4VYESHFQ#xmp2Z`~ zFLwy~DZCE-fjh|CqH8T}qG&rx^gD{jRanshI=USxsY5649IP+x7LhRF?U z?!pf89!xU73=?q5G&dJ3s8_y>r?H0nFujp3;1nLj$7tg+p2H5>ID%L36uu8NXxAa~ z9G=7n$PoFA)#C);2y_Qh%_6(eK^84HFopL}@8lVF;z!iq{8wlzKLl%(0o0%37;eT1 z+SE%0qe1-f5rWkt$ delta 656 zcmYMyJ1j$C6u|LQO6$?8dc>o&@oo?uJcgiWm$MlSKSadJ{Lf_jk^h``zz+=YHviXV31d(|jykP24W-fJw9eYc5B` zNji^qjG+T}4UbUgona}auo@q+4L{I>Ww|0=?8Z7=z!p9u+XOZSF0d98=)nx?gimy1 zl{u>)wUAk?!Zp-+dq#eWmE1x_=fFh&J*#W5BphPmI%5SI5E7(dh#FC9n|D! 
z9mWRoC~8Lzag=VIqwfCQBGStEL7|8huMKapkvxrB@H-ZviN_-Ai;X}l-M|Ul#}0f# zKmKA5_7#gna093D4yVvT`^ItE@Dh8;zi7G!h{qthl3O`Jvvp=P6q<~L!%2(f!)l*iTG?D* XS(VOBkH26bK4dS8I~}oj!V&lb((Ogn diff --git a/locales/en/LC_MESSAGES/webperf-core.po b/locales/en/LC_MESSAGES/webperf-core.po index 5370fca4..8f076545 100644 --- a/locales/en/LC_MESSAGES/webperf-core.po +++ b/locales/en/LC_MESSAGES/webperf-core.po @@ -107,6 +107,9 @@ msgstr "###############################\nRunning test: 20 - Webbkoll" msgid "TEXT_TEST_STANDARD_FILES" msgstr "###############################\nRunning test: 9 - Standard files" +msgid "TEXT_TEST_YELLOW_LAB_TOOLS" +msgstr "###############################\nKör test: 17 - Yellow Lab Tools (Quality on frontend)" + #: default.py:127 msgid "TEXT_TEST_VALID_ARGUMENTS" msgstr "Valid arguments for option -t/--test:" @@ -129,7 +132,10 @@ msgstr "-t 8\t: Google Lighthouse PWA" #: default.py:132 msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_BEST_PRACTICE" -msgstr "-t 5\t: Google Lighthouse Best PRactice" +msgstr "-t 5\t: Google Lighthouse Best Practice" + +msgid "TEXT_TEST_VALID_ARGUMENTS_YELLOW_LAB_TOOLS" +msgstr "-t 17\t: Yellow Lab Tools (Quality on frontend)" #: default.py:133 msgid "TEXT_TEST_VALID_ARGUMENTS_PAGE_NOT_FOUND" diff --git a/locales/sv/LC_MESSAGES/webperf-core.mo b/locales/sv/LC_MESSAGES/webperf-core.mo index 6a9f4de2133162c41e4d51f34531dbe9d2bd5e6a..373d41ba207a060b1be9085f672aa3bbebd6cd54 100644 GIT binary patch delta 841 zcma*l-%C?r7{KwTxy{Y=IHkl=>op@>o4IA6jTkf>K|8QwC!BgQv=&4M*N;(NRMLf1 z&_#8M5CrQYya>$n!V8m#^$!Gfk z9G+x;xlhnbVb%2q9w2Y?@9ZGqdJzv%e~d@*9qP`k<9^)4B;5Yhf0h3Idik|~B~TL&CY+D;9nNIq?^sV+#ruN^HC(jq(rsmqjVr5EvTv&H%&jr| nO1W58*D7z+xkb{2ZV delta 648 zcmYMyOG^S#6u|MLP1?)Up7vBTq8zE|u}F{tD~c2%Dhf)WRhx*GA%T5^3W*lE(H5;s zqFtLHh#s_R6+xSV76s9+i2fIahMD_2=fa(P?wQ&59eI;aRob3#wevLdgiI>?zn07* zRirVj#C3FFvfvJ?bs2QxIX2)ucH$d)u)0X33kR?Th^mkUbm0oBbz6lzjdkRws0z73wc!ius1=JCXkd{2WtpIdiGzaI*h>D5>I99J zyd&5~zKW_N+c-qGviQm=pRFQ(=C8^`2;~b)@ds6bmhyaIZOAP_hDG+5F#<1Ua2zkt zgXI+>y%@kgjN>Hk;xs;D1iNY93?^_E@30?zv}qLQu@=)9!V^>n{=m4Oz-7l$R*6fTFLxaI2@UZMsrr{S&7L= S8IJnYgZ(!Zw&^LuS@{RzhC#~! 
diff --git a/locales/sv/LC_MESSAGES/webperf-core.po b/locales/sv/LC_MESSAGES/webperf-core.po index b2a28b63..260aae6b 100644 --- a/locales/sv/LC_MESSAGES/webperf-core.po +++ b/locales/sv/LC_MESSAGES/webperf-core.po @@ -108,13 +108,16 @@ msgstr "###############################\nKör test: 20 - Webbkoll" msgid "TEXT_TEST_STANDARD_FILES" msgstr "###############################\nKör test: 9 - Standardfiler" +msgid "TEXT_TEST_YELLOW_LAB_TOOLS" +msgstr "###############################\nKör test: 17 - Yellow Lab Tools (Kvalitet på frontend)" + #: default.py:127 msgid "TEXT_TEST_VALID_ARGUMENTS" msgstr "Giltiga argument att välja på -t/--test:" #: default.py:128 msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE" -msgstr "-t 0\t: Google Lighthouse prestanda" +msgstr "-t 1\t: Google Lighthouse prestanda" #: default.py:129 msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_SEO" @@ -132,6 +135,9 @@ msgstr "-t 8\t: Google Lighthouse progressiv webbapp" msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_BEST_PRACTICE" msgstr "-t 5\t: Google Lighthouse god praxis" +msgid "TEXT_TEST_VALID_ARGUMENTS_YELLOW_LAB_TOOLS" +msgstr "-t 17\t: Yellow Lab Tools (Kvalitet på frontend)" + #: default.py:133 msgid "TEXT_TEST_VALID_ARGUMENTS_PAGE_NOT_FOUND" msgstr "-t 2\t: 404 (sida finns inte)" From 81d480f902337c21dd6f3cc87410dae273684668 Mon Sep 17 00:00:00 2001 From: Mattias Date: Thu, 10 Dec 2020 19:39:09 +0100 Subject: [PATCH 13/49] adjusted order of test help (to number order) + also hiding all test numbers (we have a lot now) --- default.py | 8 ++++---- locales/en/LC_MESSAGES/webperf-core.mo | Bin 4420 -> 4413 bytes locales/en/LC_MESSAGES/webperf-core.po | 2 +- locales/sv/LC_MESSAGES/webperf-core.mo | Bin 4503 -> 4507 bytes locales/sv/LC_MESSAGES/webperf-core.po | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/default.py b/default.py index eb2be739..ce12151c 100644 --- a/default.py +++ b/default.py @@ -131,16 +131,16 @@ def validate_test_type(test_type): if test_type != TEST_HTML and test_type != TEST_PAGE_NOT_FOUND and test_type != TEST_CSS and test_type != TEST_WEBBKOLL and test_type != TEST_GOOGLE_LIGHTHOUSE and test_type != TEST_GOOGLE_LIGHTHOUSE_PWA and test_type != TEST_GOOGLE_LIGHTHOUSE_A11Y and test_type != TEST_GOOGLE_LIGHTHOUSE_SEO and test_type != TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE and test_type != TEST_STANDARD_FILES and test_type != TEST_YELLOW_LAB_TOOLS: print(_('TEXT_TEST_VALID_ARGUMENTS')) print(_('TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE')) + print(_('TEXT_TEST_VALID_ARGUMENTS_PAGE_NOT_FOUND')) print(_('TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_SEO')) - print(_('TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_A11Y')) - print(_('TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_PWA')) print(_('TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_BEST_PRACTICE')) - print(_('TEXT_TEST_VALID_ARGUMENTS_PAGE_NOT_FOUND')) print(_('TEXT_TEST_VALID_ARGUMENTS_HTML')) print(_('TEXT_TEST_VALID_ARGUMENTS_CSS')) - print(_('TEXT_TEST_VALID_ARGUMENTS_WEBBKOLL')) + print(_('TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_PWA')) print(_('TEXT_TEST_VALID_ARGUMENTS_STANDARD_FILES')) + print(_('TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_A11Y')) print(_('TEXT_TEST_VALID_ARGUMENTS_YELLOW_LAB_TOOLS')) + print(_('TEXT_TEST_VALID_ARGUMENTS_WEBBKOLL')) return -2 else: return test_type diff --git a/locales/en/LC_MESSAGES/webperf-core.mo b/locales/en/LC_MESSAGES/webperf-core.mo index 81b5c033fb3603b3b2597e3da1b81212364492b2..076f7b052f01b61cd98ed660938f346d7119c32b 100644 GIT binary patch delta 372 
zcmXZXKTASk6vy#j4%N*}8-g-}2kqY=){=-pwUjiZ7~&Rng*zBG1@FB<2)g%SY9AX-G=cOE8;2z#%2?uzNiHKCjW<*^TDBfs{v=*c+mhl98$Qym) z5q{wgu0^E{JVX9057)7SqeY%@6F)G4zyC&KlLgYaNm?#iTl#{;%YayVLmH77r0Qz!(%@c%rHRDsA7HSW zNN1bDtcicXT(R1Gn@XNMpWk_MPI8{t*jwzm*im~7Nri4{QqoV4L{zl5?HI;6Ph%7d zNU1_QKp%&(fm_(b8O-%bc|5`lKA?jQ+`)K6+Qy5BYT6{cGiYh`OIh5*4ZK6%=o44* z3k@a)q;V`Ff7Uf7@D^K(JmCa>;3)q7J7l#Nn8Oj~D;Bp03HuBvoziCU2AA*!cd=Q& z3f*_ErA*q)n~cetoXML7lS-O&aycAcw9n3~zU>s&ZU4kAd&gec4qQL5OO=vW#Y^|l N_X2mS?u5U){s47vHOT+~ diff --git a/locales/en/LC_MESSAGES/webperf-core.po b/locales/en/LC_MESSAGES/webperf-core.po index 8f076545..ad7aecf0 100644 --- a/locales/en/LC_MESSAGES/webperf-core.po +++ b/locales/en/LC_MESSAGES/webperf-core.po @@ -159,5 +159,5 @@ msgstr "-t 9\t: Standard files" #: default.py:137 default.py:141 default.py:146 default.py:213 msgid "TEXT_COMMAND_USAGE" -msgstr "\n\tWebPerf Core\n\n\tUsage:\ndefault.py -u https://webperf.se\n\n\tOptions and arguments:\n\t-h/--help\t\t\t: Help information on how to use script\n\t-u/--url \t\t: website url to test against\n\t-t/--test <1/2/4/5/6/7/8/9/10/20>\t\t: runs ONE specific test against website(s)\n\t-r/--review\t\t\t: show reviews in terminal\n\t-i/--input \t\t: input file path (.json/.sqlite)\n\t-o/--output \t\t: output file path (.json/.csv/.sql/.sqlite)\n\t-A/--addUrl \t\t: website url (required in compination with -i/--input)\n\t-D/--deleteUrl \t: website url (required in compination with -i/--input)\n\t-L/--language \t: language used for output(en = default/sv)" +msgstr "\n\tWebPerf Core\n\n\tUsage:\ndefault.py -u https://webperf.se\n\n\tOptions and arguments:\n\t-h/--help\t\t\t: Help information on how to use script\n\t-u/--url \t\t: website url to test against\n\t-t/--test \t\t: run ONE test (use ? to list available tests)\n\t-r/--review\t\t\t: show reviews in terminal\n\t-i/--input \t\t: input file path (.json/.sqlite)\n\t-o/--output \t\t: output file path (.json/.csv/.sql/.sqlite)\n\t-A/--addUrl \t\t: website url (required in compination with -i/--input)\n\t-D/--deleteUrl \t: website url (required in compination with -i/--input)\n\t-L/--language \t: language used for output(en = default/sv)" diff --git a/locales/sv/LC_MESSAGES/webperf-core.mo b/locales/sv/LC_MESSAGES/webperf-core.mo index 373d41ba207a060b1be9085f672aa3bbebd6cd54..1c0512b65da68d35ac8786206576194e29497aa7 100644 GIT binary patch delta 395 zcmW;Gy-Px26vy#j4(3fQgKEj)DTm(Cia;U?A_{7f9Eu{axLz(^Mf7oKau#ar1!}9k z$Y_mls6U}O+WHe(`WhV0=RD^;9L`nhCiR%_Y5s(yt%x)&X)!AG;t@Jn#8Ir`2%aIK zJJJC@;21_@?7%rR*uZ(b#0C7oZT!U?ZpNh@ypF4>U5alSdCc~61fJstzTq&22c#8D zVjB0cdvFW;@EOPP6{qkEQy5D~Gnm0yJVb6*3zK+}n2?4jo@fwyCuQ&xv*-*;d$`^{ z4c&%w)~#DpX_U)uZPjs>O{w!zGp@C!UUg6WqF=J+|I1{d;<;wc6ua{R^T4m$g0X%O Sc%A!-7x*48mbH(arN|#g13<0- delta 391 zcmWm8Q7c1n7{~F)itS)bA;ra?T`*Svb7o_tT}UZ%AyeY&jI(vx9OL{WSG!tqNy-J~ z1t_gnZsclSf@`_+65RT#r|0wQspsi=NZuz~#jeIrL}WcCGAkmwPb7-_XkiJ*v4*2~ zg_Jze4)7T#G1|`!OrgS4bnq54_<ZKBUgoW@6a#%Rilg-8g+bP Ylp9q1b1zUAUa3^Cxh9-xm#tjv501+@r~m)} diff --git a/locales/sv/LC_MESSAGES/webperf-core.po b/locales/sv/LC_MESSAGES/webperf-core.po index 260aae6b..c80d3b40 100644 --- a/locales/sv/LC_MESSAGES/webperf-core.po +++ b/locales/sv/LC_MESSAGES/webperf-core.po @@ -160,5 +160,5 @@ msgstr "-t 9\t: Standardfiler" #: default.py:137 default.py:141 default.py:146 default.py:213 msgid "TEXT_COMMAND_USAGE" -msgstr "\n\tWebPerf Core\n\n\tAnvänd så här:\ndefault.py -u https://webperf.se\n\n\tVal och argument:\n\t-h/--help\t\t\t: Hjälp och hur du använder skriptet\n\t-u/--url \t\t: webbplatsens adress att testa\n\t-t/--test 
<1/2/4/5/6/7/8/9/10/20>\t\t: kör ett enda specifikt test mot angiven webbplats(er)\n\t-r/--review\t\t\t: visar omdömen direkt i terminalen\n\t-i/--input \t\t: sökväg för input-fil (.json/.sqlite)\n\t-o/--output \t\t: sökväg till output-fil (.json/.csv/.sql/.sqlite)\n\t-A/--addUrl \t\t: webbplatsens adress/url (ett krav när du använder -i/--input)\n\t-D/--deleteUrl \t: webbplats adress/url (ett krav när du använder -i/--input)\n\t-L/--language \t: språk som används för output(en = default/sv)" +msgstr "\n\tWebPerf Core\n\n\tAnvänd så här:\ndefault.py -u https://webperf.se\n\n\tVal och argument:\n\t-h/--help\t\t\t: Hjälp och hur du använder skriptet\n\t-u/--url \t\t: webbplatsens adress att testa\n\t-t/--test \t\t: kör ett specifikt test (ange ? för att lista tillgängliga tester)\n\t-r/--review\t\t\t: visar omdömen direkt i terminalen\n\t-i/--input \t\t: sökväg för input-fil (.json/.sqlite)\n\t-o/--output \t\t: sökväg till output-fil (.json/.csv/.sql/.sqlite)\n\t-A/--addUrl \t\t: webbplatsens adress/url (ett krav när du använder -i/--input)\n\t-D/--deleteUrl \t: webbplats adress/url (ett krav när du använder -i/--input)\n\t-L/--language \t: språk som används för output(en = default/sv)" From a85a6c2b7a77e346611b1293850272c46058dd27 Mon Sep 17 00:00:00 2001 From: Mattias Date: Thu, 10 Dec 2020 20:01:43 +0100 Subject: [PATCH 14/49] texts in engines has been translated --- engines/json.py | 8 ++++---- engines/sqlite.py | 4 ++-- locales/en/LC_MESSAGES/webperf-core.mo | Bin 4413 -> 4571 bytes locales/en/LC_MESSAGES/webperf-core.po | 6 ++++++ locales/sv/LC_MESSAGES/webperf-core.mo | Bin 4507 -> 4681 bytes locales/sv/LC_MESSAGES/webperf-core.po | 7 ++++++- 6 files changed, 18 insertions(+), 7 deletions(-) diff --git a/engines/json.py b/engines/json.py index d06bde4f..a18bfb35 100644 --- a/engines/json.py +++ b/engines/json.py @@ -3,12 +3,12 @@ def add_site(input_filename, url): sites = read_sites(input_filename) - print(sites) + #print(sites) id = len(sites) sites.append([id, url]) write_sites(input_filename, sites) - print("website with url: " + url + " has been added\n") + print(_('TEXT_WEBSITE_URL_ADDED').format(url)) return sites @@ -23,8 +23,8 @@ def delete_site(input_filename, url): write_sites(input_filename, tmpSites) - print("website with url: " + url + " has been deleted\n") - + print(_('TEXT_WEBSITE_URL_DELETED').format(site_url)) + return tmpSites def read_sites(input_filename): diff --git a/engines/sqlite.py b/engines/sqlite.py index c60ddff5..74ba65db 100644 --- a/engines/sqlite.py +++ b/engines/sqlite.py @@ -24,7 +24,7 @@ def add_site(input_filename, url): conn.close() - print("website with url: " + url + " has been added\n") + print(_('TEXT_WEBSITE_URL_ADDED').format(url)) return read_sites(input_filename) @@ -39,7 +39,7 @@ def delete_site(input_filename, url): conn.close() - print("website with url: " + url + " has been deleted\n") + print(_('TEXT_WEBSITE_URL_DELETED').format(site_url)) return read_sites(input_filename) diff --git a/locales/en/LC_MESSAGES/webperf-core.mo b/locales/en/LC_MESSAGES/webperf-core.mo index 076f7b052f01b61cd98ed660938f346d7119c32b..e9b9a5d1c8d389894d82a09a9294d11203e8a314 100644 GIT binary patch delta 813 zcmajdK}Zx~6u|M9Zmq1Y?Fx$KnuF$QD(b~0^-#%-B8ZW$wni|l?10KD;;txGY|zDv z-l7l{1fh$9u(wbYJPPJfFaim>hj{T2{U1Bvp@YNB{NDSRZ{GL4nf;pc+RV=q&nMw% z;0kfQxue<5v6d%NLb{H{xQhWia6LtxcY#6tixn8i&%Lt|4e}v;jA^XG4GfE9WRJkl zz%Q)CKWL!elbg_j)#NeP1=NLn!E*eHI`7cUe_$E;73xBY?&b=Hv5DNoHk`o@>dPiU 
zD+A}Qh4(}rl6RqY<+bZatRdgVM*NQ2nQMH2es8XD1M0$_qD41jxJp+x3PkD|?s5`mDFjn$I=s(cyYNKx89qOl9!cyEq zPWi^I1&;%K*)D=>21YQ#AEX7BQ43^)*~PpSPixeQMeSEs_fTKdvWExz?dPUxneyMT zY4uxC%glPc5#PYPGnSf2JI4G(dfb?sO?Db@+uj@F@su&tARZ4orI1eMZMLqSUo6%@3! z6gXCcG$uqtONFBlB}9Kg--{P|y!Srm9Jt(b&wKBohhVvS(h-30f#Tq8DEGT6u|MQr}v-DsK}Fe99=wwv)Ck3 zlD7mA3Tt=}f1<)j&|A=kN61sY_fa=8g|#?`dfj_JU%`Fkzfm_*vCG@A30uewOyCHf zWq)}|&`#mAZ~1PK!{p~tPvxfXI36Qkz-Ii2dNLb$03#u9<2dTZE@CIsOyN3HdA`S6 zH(oA6NnT+Y`^y`GKTm+V^A+SHzxW!!Ej)??6(Uz~3>ALBllUDwu$D1)VK-jGXV{HD zFo~xbn}v_@6n?>yK@hDJslh(HfDY=3%%gtNWej7GgVz(NM17VW*norBf{(Bj7m%K1 z8TGsI4XZK03qmOSF@fo~b?8F0P&Xk-JG}7^ietg9sR#6AI%_7|x zH%y`~WEb1;2zA`0k>8__{0(&>->4H>8bl%(#wae~IP=RP!3Z69h8650_giW^m@wSL zUivdQhy~Qjd|(Gwaf<5MtRfY5KWD4G7jufxB^zkREz|{XqmTI|L%=0hJl3#)J?M1t z5gf)K?%*h%;ut>TJpSS`&Qit&Ji`_I!UV=CR}8Z_gC%T5OB3~(z diff --git a/locales/sv/LC_MESSAGES/webperf-core.po b/locales/sv/LC_MESSAGES/webperf-core.po index c80d3b40..ca0a5f3d 100644 --- a/locales/sv/LC_MESSAGES/webperf-core.po +++ b/locales/sv/LC_MESSAGES/webperf-core.po @@ -15,7 +15,12 @@ msgstr "" "Generated-By: pygettext.py 1.5\n" -#: tests/page_not_found.py:35 +msgid "TEXT_WEBSITE_URL_ADDED" +msgstr "webbplats med adress: {0} har blivit tillagd\n" + +msgid "TEXT_WEBSITE_URL_DELETED" +msgstr "webbplats med adress: {0} har blivit borttagen\n" + msgid "TEXT_TEST_404_REVIEW_WRONG_STATUS_CODE" msgstr "* Fel statuskod. Fick {0} när 404 vore korrekt.\n" From 0b72d8b5ae3f53feeeff26873b05e286f660860c Mon Sep 17 00:00:00 2001 From: Mattias Date: Fri, 11 Dec 2020 22:12:23 +0100 Subject: [PATCH 15/49] seperate translations for test + translated YTL --- default.py | 33 ++++---- .../frontend_quality_yellow_lab_tools.mo | Bin 0 -> 1727 bytes .../frontend_quality_yellow_lab_tools.po | 73 ++++++++++++++++++ locales/en/LC_MESSAGES/webperf-core.po | 3 - locales/en/LC_MESSAGES/yellow_lab_tools.mo | Bin 0 -> 1720 bytes .../frontend_quality_yellow_lab_tools.mo | Bin 0 -> 1770 bytes .../frontend_quality_yellow_lab_tools.po | 73 ++++++++++++++++++ locales/sv/LC_MESSAGES/webperf-core.po | 3 - ...y => frontend_quality_yellow_lab_tools.py} | 47 ++++++----- tests/lighthouse.py | 2 +- tests/lighthouse_a11y.py | 2 +- tests/lighthouse_best_practice.py | 2 +- tests/lighthouse_pwa.py | 2 +- tests/lighthouse_seo.py | 2 +- tests/page_not_found.py | 2 +- tests/privacy_webbkollen.py | 2 +- tests/standard_files.py | 2 +- tests/w3c_validate_css.py | 2 +- tests/w3c_validate_html.py | 2 +- 19 files changed, 200 insertions(+), 52 deletions(-) create mode 100644 locales/en/LC_MESSAGES/frontend_quality_yellow_lab_tools.mo create mode 100644 locales/en/LC_MESSAGES/frontend_quality_yellow_lab_tools.po create mode 100644 locales/en/LC_MESSAGES/yellow_lab_tools.mo create mode 100644 locales/sv/LC_MESSAGES/frontend_quality_yellow_lab_tools.mo create mode 100644 locales/sv/LC_MESSAGES/frontend_quality_yellow_lab_tools.po rename tests/{yellow_lab_tools.py => frontend_quality_yellow_lab_tools.py} (51%) diff --git a/default.py b/default.py index ce12151c..eec58ebc 100644 --- a/default.py +++ b/default.py @@ -10,7 +10,7 @@ (TEST_UNKNOWN_01, TEST_GOOGLE_LIGHTHOUSE, TEST_PAGE_NOT_FOUND, TEST_UNKNOWN_03, TEST_GOOGLE_LIGHTHOUSE_SEO, TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE, TEST_HTML, TEST_CSS, TEST_GOOGLE_LIGHTHOUSE_PWA, TEST_STANDARD_FILES, TEST_GOOGLE_LIGHTHOUSE_A11Y, TEST_UNKNOWN_11, TEST_UNKNOWN_12, TEST_UNKNOWN_13, TEST_UNKNOWN_14, TEST_UNKNOWN_15, TEST_UNKNOWN_16, TEST_YELLOW_LAB_TOOLS, TEST_UNKNOWN_18, TEST_UNKNOWN_19, TEST_WEBBKOLL) = range(21) -def testsites(sites, test_type=None, 
show_reviews=False, only_test_untested_last_hours=24, order_by='title ASC'): +def testsites(langCode, sites, test_type=None, show_reviews=False, only_test_untested_last_hours=24, order_by='title ASC'): """ Executing the actual tests. Attributes: @@ -55,9 +55,9 @@ def testsites(sites, test_type=None, show_reviews=False, only_test_untested_last elif test_type == TEST_GOOGLE_LIGHTHOUSE_A11Y: from tests.lighthouse_a11y import run_test elif test_type == TEST_YELLOW_LAB_TOOLS: - from tests.yellow_lab_tools import run_test + from tests.frontend_quality_yellow_lab_tools import run_test - the_test_result = run_test(website) + the_test_result = run_test(langCode, website) if the_test_result != None: print(_('TEXT_SITE_RATING'), the_test_result[0]) @@ -87,43 +87,42 @@ def testsites(sites, test_type=None, show_reviews=False, only_test_untested_last return result -def testing(sites, test_type= TEST_ALL, show_reviews= False): +def testing(langCode, sites, test_type= TEST_ALL, show_reviews= False): print(_('TEXT_TESTING_START_HEADER').format(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) tests = list() ############## if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE): print(_('TEXT_TEST_GOOGLE_LIGHTHOUSE')) - tests.extend(testsites(sites, test_type=TEST_GOOGLE_LIGHTHOUSE, show_reviews=show_reviews)) + tests.extend(testsites(langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_A11Y): print(_('TEXT_TEST_GOOGLE_LIGHTHOUSE_A11Y')) - tests.extend(testsites(sites, test_type=TEST_GOOGLE_LIGHTHOUSE_A11Y, show_reviews=show_reviews)) + tests.extend(testsites(langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE_A11Y, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_SEO): print(_('TEXT_TEST_GOOGLE_LIGHTHOUSE_SEO')) - tests.extend(testsites(sites, test_type=TEST_GOOGLE_LIGHTHOUSE_SEO, show_reviews=show_reviews)) + tests.extend(testsites(langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE_SEO, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_PWA): print(_('TEXT_TEST_GOOGLE_LIGHTHOUSE_PWA')) - tests.extend(testsites(sites, test_type=TEST_GOOGLE_LIGHTHOUSE_PWA, show_reviews=show_reviews)) + tests.extend(testsites(langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE_PWA, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE): print(_('TEXT_TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE')) - tests.extend(testsites(sites, test_type=TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE, show_reviews=show_reviews)) + tests.extend(testsites(langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_PAGE_NOT_FOUND): print(_('TEXT_TEST_PAGE_NOT_FOUND')) - tests.extend(testsites(sites, test_type=TEST_PAGE_NOT_FOUND, show_reviews=show_reviews)) + tests.extend(testsites(langCode, sites, test_type=TEST_PAGE_NOT_FOUND, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_HTML): print(_('TEXT_TEST_HTML')) - tests.extend(testsites(sites, test_type=TEST_HTML, show_reviews=show_reviews)) + tests.extend(testsites(langCode, sites, test_type=TEST_HTML, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_CSS): print(_('TEXT_TEST_CSS')) - tests.extend(testsites(sites, test_type=TEST_CSS, show_reviews=show_reviews)) + tests.extend(testsites(langCode, sites, test_type=TEST_CSS, 
show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_WEBBKOLL): print(_('TEXT_TEST_WEBBKOLL')) - tests.extend(testsites(sites, test_type=TEST_WEBBKOLL, show_reviews=show_reviews)) + tests.extend(testsites(langCode, sites, test_type=TEST_WEBBKOLL, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_STANDARD_FILES): print(_('TEXT_TEST_STANDARD_FILES')) - tests.extend(testsites(sites, test_type=TEST_STANDARD_FILES, show_reviews=show_reviews)) + tests.extend(testsites(langCode, sites, test_type=TEST_STANDARD_FILES, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_YELLOW_LAB_TOOLS): - print(_('TEXT_TEST_YELLOW_LAB_TOOLS')) - tests.extend(testsites(sites, test_type=TEST_YELLOW_LAB_TOOLS, show_reviews=show_reviews)) + tests.extend(testsites(langCode, sites, test_type=TEST_YELLOW_LAB_TOOLS, show_reviews=show_reviews)) return tests @@ -268,7 +267,7 @@ def main(argv): sites = delete_site(input_filename, delete_url) elif (len(sites)): # run test(s) for every website - siteTests = testing(sites, test_type=test_type, show_reviews=show_reviews) + siteTests = testing(langCode, sites, test_type=test_type, show_reviews=show_reviews) if (len(output_filename) > 0): file_ending = "" file_long_ending = "" diff --git a/locales/en/LC_MESSAGES/frontend_quality_yellow_lab_tools.mo b/locales/en/LC_MESSAGES/frontend_quality_yellow_lab_tools.mo new file mode 100644 index 0000000000000000000000000000000000000000..bb7c826ebb59c01aca9ef18c9e4fdb756857a6c2 GIT binary patch literal 1727 zcmaKrL66&15XVhbIOI|&AZQC{3K9ZV4(nA*+fWeMNce}YQ)H>mPYom$whR(uy! z^{#*_Zi45)d*I99A*g!#vWkBLs`GvTmF`!Nq4r-O!QMEnY3D%;ya-Mz`-_U-fp5b9 zGx#?6JE-bt&uiLSz{{!8oZT9x8 zre`|#aEzqY?A-7c4tLMCO}pcJNLqOM%*C{5 z&E7FP3zrJAw(r<}o9&w|<_&t)7H+fczUeXF?5kZY)JwjO<2>zjZat;LfrGUL%0OAA zmx%*DlDZk{`#j4dku<2+Y~8{&5LA6Es@8Kny_NAmBC59o$s4q_xwWYqTe@M8@lnIL zv;lh)Lfz$uk*ZSZ2CV7oT}N*>>4wwjnj3c=o7Jo!m%5h)NgfAMWDS}HQbs{e*Q+0& z9r0nxvvED=pF*8XieSQZj|Z~`v1AfQ`IN3pm@C4r*=5I581>CsOC*vfIBTAwX~`eT z57Rh^l8jcx=+WbKp@ZNs$VXX} z$`u7NXe4H7%pXP)Z7e0@(LXCR3z8@;uxngQzLHoqG%9i_W`wPev9Yz1gWZB>|D9?V zvmwvaEUB6+;|~g+pGd?AB95ONSL%*xv#>6*MfOSYE7Y{{nzC$yxva literal 0 HcmV?d00001 diff --git a/locales/en/LC_MESSAGES/frontend_quality_yellow_lab_tools.po b/locales/en/LC_MESSAGES/frontend_quality_yellow_lab_tools.po new file mode 100644 index 00000000..df22c3e9 --- /dev/null +++ b/locales/en/LC_MESSAGES/frontend_quality_yellow_lab_tools.po @@ -0,0 +1,73 @@ +# English (default). +# Copyright (C) 2020 WebPerf +# FIRST AUTHOR , 2020. 
+# +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"POT-Creation-Date: 2020-12-11 16:14+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: mattias \n" +"Language-Team: English \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" + + +msgid "TEXT_RUNNING_TEST" +msgstr "###############################\nRunning test: 17 - Quality on frontend (Yellow Lab Tools)" + +msgid "TEXT_WEBSITE_IS_VERY_GOOD" +msgstr "* The website is well built!\n" + +msgid "TEXT_WEBSITE_IS_GOOD" +msgstr "* The website is good.\n" + +msgid "TEXT_WEBSITE_IS_OK" +msgstr "* The website is not good or bad.\n" + +msgid "TEXT_WEBSITE_IS_BAD" +msgstr "* The website is quite slow or has a bad front-end code.\n" + +msgid "TEXT_WEBSITE_IS_VERY_BAD" +msgstr "* Very bad rating according to Yellow Lab Tools.\n" + +msgid "TEXT_OVERALL_GRADE" +msgstr "* Overall rating: {} out of 100\n" + +msgid "TEXT_TESTED_ON_DEVICETYPE" +msgstr "* Tested for device type: {}\n" + +msgid "TEXT_PAGE_WEIGHT" +msgstr "* page weight: {}\n" + +msgid "TEXT_PAGE_REQUESTS" +msgstr "* Number of page requests: {}\n" + +msgid "TEXT_PAGE_DOM_COMPLEXITY" +msgstr "* Page DOM complexity: {}\n" + +msgid "TEXT_PAGE_DOM_MANIPULATIONS" +msgstr "* Page DOM manipulations: {}\n" + +msgid "TEXT_PAGE_SCROLL" +msgstr "* Page scroll: {}\n" + +msgid "TEXT_PAGE_BAD_JS" +msgstr "* Page use of bad javascript: {}\n" + +msgid "TEXT_PAGE_JQUERY" +msgstr "* Page use of jQuery: {}\n" + +msgid "TEXT_PAGE_CSS_COMPLEXITY" +msgstr "* Page CSS complexity: {}\n" + +msgid "TEXT_PAGE_BAD_CSS" +msgstr "* Page use of bad CSS: {}\n" + +msgid "TEXT_PAGE_FONTS" +msgstr "* Page use of custom webfonts: {}\n" + +msgid "TEXT_SERVER_CONFIG" +msgstr "* Server configuration rating: {}\n" \ No newline at end of file diff --git a/locales/en/LC_MESSAGES/webperf-core.po b/locales/en/LC_MESSAGES/webperf-core.po index fcd96b55..45be99d1 100644 --- a/locales/en/LC_MESSAGES/webperf-core.po +++ b/locales/en/LC_MESSAGES/webperf-core.po @@ -113,9 +113,6 @@ msgstr "###############################\nRunning test: 20 - Webbkoll" msgid "TEXT_TEST_STANDARD_FILES" msgstr "###############################\nRunning test: 9 - Standard files" -msgid "TEXT_TEST_YELLOW_LAB_TOOLS" -msgstr "###############################\nKör test: 17 - Yellow Lab Tools (Quality on frontend)" - #: default.py:127 msgid "TEXT_TEST_VALID_ARGUMENTS" msgstr "Valid arguments for option -t/--test:" diff --git a/locales/en/LC_MESSAGES/yellow_lab_tools.mo b/locales/en/LC_MESSAGES/yellow_lab_tools.mo new file mode 100644 index 0000000000000000000000000000000000000000..9466595d202a79b6418d1c3809c275e14275ca35 GIT binary patch literal 1720 zcmaKr&yU+w5XVhbIOI|&AfWuv6eI+!9M&tKZ77Ir@-}tr*jdNBZK@*6iC-MA96#57 z-fdL%!VRv73l}aNIdMZm;>3wFN02!12O$0dzRxi<>4rv%KAD;C%zHC$^85L-UupO} zhwm%+{-}(V&z&bU?L6#H!Dqk>d>Z@=d=dPz=6B%Buzv>6fq#HX_ZO)0&zxD>&)0kt zRP`=_DsF-=fp@@X!DCR}>5Dr44XDoh9#pztK!)0Xf&_c(tfpN6E$|{3)%NE#zXe}~ z|0nQu@HbG^(Vo<_*MJM4y2q887C3_KgQ~9|K$ZV1cm@0$d;!#-TGnHr0ec22-PfSz zv0I++95T0US$@xLciGZ4X!n@E+wS_E;qVd9^;HMLe#hw#EOyKEMk~s$)BpcKzipd? 
z1FP+sj=frK&#}E#`}+O?b62an44&bdX4r8Z%UU`4CNq22yrsiEux-=s`5uy%o*{G5 zEv{zonZ2b;1zFd3Y`@D6%?|TMgL(=#+3wKvm~Rf%ESBmOU(az)JDnS+bU1Ldu|O55 zstj^*#3xcWL;aBFMJ&=54ceU>cm{%Mh(R?5j;D8W9!Ny>ZXkJ!cD8r6bz?_23^G1w z8Sibu-iAM1*k~wdnFVmW>`yd5*?IeRt8KY!Pv( ziZ!zZ8O`?_+B@VNLy#l{Rh4%C0f|zQn393PKrlcDR2E#h#z9C};1=C~uqkvD90$cD zk2ATZKn6|3JWKe!SmGKh$yoZwh2}vTXC-Ehhbh()>kCcFLW((I>QhW?qvUA6aN~+{HNiTX^K(e)e2-7mq@1(PZDuL zRxl<{h@_ym_RAo_Fi4~{&8x=_-^Q?qJjZ)rB+W+6*+}i)(IzB zZATrQaW#e_mYm{(?w0CDp-zQ3kosSG+r_0X z3wU0@?-l%hG{M|+|A|83BH|Ch=fD(v7W@Q!5&UAt@4%N4{|r6@{s9`@U!Zya!i7A( zG~?@_@w*9{x&yuh-UputN1*BH(^>r+(5&}8Xmr1T4Bmf&R}e2R6$)>FO|S$G!B@eP z8Giz=BK{q`1};6B$JfEv5PRSya5Up*;4b2CL9?&FK{KDrPZbJxz#HIYa0osPeh#jJ zUxQ}dU%`UUcKx2a%{q3Y(W`gt8p~5{yUu!RcCAw7t(McO z_k6sRXFS%yZqZt6-Kpm(Q)IQC+v?TWwo_$(w>_J}4qNjapY%<@d|`iM~oulGShXL-LaakRkLZstu&qG58M_jHh88jUvZhmTqmkR zM_lWOXLNh^=gzU{r$P-%nRp*+84h_UEMM@W3VFvOh_ZdUtr0#VY&uQ$a2Ms$O0k+q zEhN?&r)XM>2l{#%^GLo+d;45vLch1=uUoes>P$}qp)4l%k|2^{g>LmDU91Z!6gD1M zYvT%~<4|a0R!YZIF1=kWT%*ZXBcVbSrCbJr`a+NK?nfUJA5j^Tfx?Rdm=l+y$%zak z%uO+voW@Z|xXwoYzq$iH;@KXS(F<}I&|z|v#^OPw#bOm7#d+2B$q&jjqtFajrkyNr z68Z)Ig|=}%ebVDJlaXFrY(aW(ZzvR|mhaBScw!+*iEM}iP&7af`Lbsc42rTtmBg_b z(p=VzFT;I_#bKVk`rl7+(|Bu2B}C#N(})5{Uin_Usd$uTu}pZp|= zv$xD?355g2(BvQrhdBG`myz$mha=Z{=R)oaonaDZeePf~xMMoQCMPF~Fkr1Io}9`s z<41(0V2jIlcJgO!ScgeaGUvZ&&{uq#-w?5;, 2020. +# +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"POT-Creation-Date: 2020-12-11 16:14+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: mattias \n" +"Language-Team: Swedish \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" + + +msgid "TEXT_RUNNING_TEST" +msgstr "###############################\nKör test: 17 - Kvalitet på frontend (Yellow Lab Tools)" + +msgid "TEXT_WEBSITE_IS_VERY_GOOD" +msgstr "* Webbplatsen är välbyggd!\n" + +msgid "TEXT_WEBSITE_IS_GOOD" +msgstr "* Webbplatsen är bra.\n" + +msgid "TEXT_WEBSITE_IS_OK" +msgstr "* Helt ok.\n" + +msgid "TEXT_WEBSITE_IS_BAD" +msgstr "* Webbplatsen är rätt långsam eller har dålig frontend-kod.\n" + +msgid "TEXT_WEBSITE_IS_VERY_BAD" +msgstr "* Väldigt dåligt betyg enligt Yellow Lab Tools!\n" + +msgid "TEXT_OVERALL_GRADE" +msgstr "* Övergripande betyg: {} av 100\n" + +msgid "TEXT_TESTED_ON_DEVICETYPE" +msgstr "* Testat för enhetstyp: {}\n" + +msgid "TEXT_PAGE_WEIGHT" +msgstr "* Sidans storlek: {}\n" + +msgid "TEXT_PAGE_REQUESTS" +msgstr "* Andelen resurser för sidan: {}\n" + +msgid "TEXT_PAGE_DOM_COMPLEXITY" +msgstr "* Sidans DOM komplexitet: {}\n" + +msgid "TEXT_PAGE_DOM_MANIPULATIONS" +msgstr "* Förändringar av sidans DOM: {}\n" + +msgid "TEXT_PAGE_SCROLL" +msgstr "* Sid skrollning: {}\n" + +msgid "TEXT_PAGE_BAD_JS" +msgstr "* Sidans användning av dålig javascript: {}\n" + +msgid "TEXT_PAGE_JQUERY" +msgstr "* Sidans användning av jQuery: {}\n" + +msgid "TEXT_PAGE_CSS_COMPLEXITY" +msgstr "* Sidans CSS komplexitet: {}\n" + +msgid "TEXT_PAGE_BAD_CSS" +msgstr "* Sidans användning av dålig CSS: {}\n" + +msgid "TEXT_PAGE_FONTS" +msgstr "* Sidans användning av webb typsnitt: {}\n" + +msgid "TEXT_SERVER_CONFIG" +msgstr "* Server konfiguration: {}\n" \ No newline at end of file diff --git a/locales/sv/LC_MESSAGES/webperf-core.po b/locales/sv/LC_MESSAGES/webperf-core.po index ca0a5f3d..459be3cb 100644 --- a/locales/sv/LC_MESSAGES/webperf-core.po +++ b/locales/sv/LC_MESSAGES/webperf-core.po @@ -113,9 +113,6 @@ msgstr "###############################\nKör test: 20 - Webbkoll" msgid "TEXT_TEST_STANDARD_FILES" msgstr 
"###############################\nKör test: 9 - Standardfiler" -msgid "TEXT_TEST_YELLOW_LAB_TOOLS" -msgstr "###############################\nKör test: 17 - Yellow Lab Tools (Kvalitet på frontend)" - #: default.py:127 msgid "TEXT_TEST_VALID_ARGUMENTS" msgstr "Giltiga argument att välja på -t/--test:" diff --git a/tests/yellow_lab_tools.py b/tests/frontend_quality_yellow_lab_tools.py similarity index 51% rename from tests/yellow_lab_tools.py rename to tests/frontend_quality_yellow_lab_tools.py index 9a5cafa6..db27fca1 100644 --- a/tests/yellow_lab_tools.py +++ b/tests/frontend_quality_yellow_lab_tools.py @@ -10,16 +10,25 @@ from bs4 import BeautifulSoup import config from tests.utils import * +import gettext +_ = gettext.gettext ### DEFAULTS request_timeout = config.http_request_timeout googlePageSpeedApiKey = config.googlePageSpeedApiKey -def run_test(url, device='phone'): +def run_test(langCode, url, device='phone'): """ Analyzes URL with Yellow Lab Tools docker image. Devices might be; phone, tablet, desktop """ + + language = gettext.translation('frontend_quality_yellow_lab_tools', localedir='locales', languages=[langCode]) + language.install() + _ = language.gettext + + print(_("TEXT_RUNNING_TEST")) + r = requests.post('https://yellowlab.tools/api/runs', data = {'url':url, "waitForResponse":'true', 'device': device}) result_url = r.url @@ -48,28 +57,28 @@ def run_test(url, device='phone'): rating = 1 if rating == 5: - review = '* Webbplatsen är välbyggd!\n' + review = _("TEXT_WEBSITE_IS_VERY_GOOD") elif rating >= 4: - review = '* Webbplatsen är bra.\n' + review = _("TEXT_WEBSITE_IS_GOOD") elif rating >= 3: - review = '* Helt ok.\n' + review = _("TEXT_WEBSITE_IS_OK") elif rating >= 2: - review = '* Webbplatsen är rätt långsam eller har dålig frontend-kod.\n' + review = _("TEXT_WEBSITE_IS_BAD") elif rating <= 1: - review = '* Väldigt dåligt betyg enligt Yellow Lab Tools!\n' + review = _("TEXT_WEBSITE_IS_VERY_BAD") - review += '* Övergripande betyg: {} av 100\n'.format(return_dict["globalScore"]) - review += '* Testat för devicetyp: {}\n'.format(device) - review += '* pageWeight: {}\n'.format(return_dict["pageWeight"]) - review += '* requests: {}\n'.format(return_dict["requests"]) - review += '* domComplexity: {}\n'.format(return_dict["domComplexity"]) - review += '* domManipulations: {}\n'.format(return_dict["domManipulations"]) - review += '* scroll: {}\n'.format(return_dict["scroll"]) - review += '* badJavascript: {}\n'.format(return_dict["badJavascript"]) - review += '* jQuery: {}\n'.format(return_dict["jQuery"]) - review += '* cssComplexity: {}\n'.format(return_dict["cssComplexity"]) - review += '* badCSS: {}\n'.format(return_dict["badCSS"]) - review += '* fonts: {}\n'.format(return_dict["fonts"]) - review += '* serverConfig: {}\n'.format(return_dict["serverConfig"]) + review += _("TEXT_OVERALL_GRADE").format(return_dict["globalScore"]) + review += _("TEXT_TESTED_ON_DEVICETYPE").format(device) + review += _("TEXT_PAGE_WEIGHT").format(return_dict["pageWeight"]) + review += _("TEXT_PAGE_REQUESTS").format(return_dict["requests"]) + review += _("TEXT_PAGE_DOM_COMPLEXITY").format(return_dict["domComplexity"]) + review += _("TEXT_PAGE_DOM_MANIPULATIONS").format(return_dict["domManipulations"]) + review += _("TEXT_PAGE_SCROLL").format(return_dict["scroll"]) + review += _("TEXT_PAGE_BAD_JS").format(return_dict["badJavascript"]) + review += _("TEXT_PAGE_JQUERY").format(return_dict["jQuery"]) + review += _("TEXT_PAGE_CSS_COMPLEXITY").format(return_dict["cssComplexity"]) + review += 
_("TEXT_PAGE_BAD_CSS").format(return_dict["badCSS"]) + review += _("TEXT_PAGE_FONTS").format(return_dict["fonts"]) + review += _("TEXT_SERVER_CONFIG").format(return_dict["serverConfig"]) return (rating, review, return_dict) \ No newline at end of file diff --git a/tests/lighthouse.py b/tests/lighthouse.py index 72c8f93a..e6ce5a6c 100644 --- a/tests/lighthouse.py +++ b/tests/lighthouse.py @@ -15,7 +15,7 @@ request_timeout = config.http_request_timeout googlePageSpeedApiKey = config.googlePageSpeedApiKey -def run_test(url, strategy='mobile', category='performance'): +def run_test(langCode, url, strategy='mobile', category='performance'): """ perf = https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category=performance&strategy=mobile&url=YOUR-SITE&key=YOUR-KEY a11y = https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category=accessibility&strategy=mobile&url=YOUR-SITE&key=YOUR-KEY diff --git a/tests/lighthouse_a11y.py b/tests/lighthouse_a11y.py index eacb2f12..9ed9ae27 100644 --- a/tests/lighthouse_a11y.py +++ b/tests/lighthouse_a11y.py @@ -15,7 +15,7 @@ request_timeout = config.http_request_timeout googlePageSpeedApiKey = config.googlePageSpeedApiKey -def run_test(url, strategy='mobile', category='accessibility'): +def run_test(langCode, url, strategy='mobile', category='accessibility'): check_url = url.strip() pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&key={2}'.format(category, check_url, googlePageSpeedApiKey) diff --git a/tests/lighthouse_best_practice.py b/tests/lighthouse_best_practice.py index f3a2111e..406606be 100644 --- a/tests/lighthouse_best_practice.py +++ b/tests/lighthouse_best_practice.py @@ -15,7 +15,7 @@ request_timeout = config.http_request_timeout googlePageSpeedApiKey = config.googlePageSpeedApiKey -def run_test(url, strategy='mobile', category='best-practices'): +def run_test(langCode, url, strategy='mobile', category='best-practices'): check_url = url.strip() pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&strategy={2}&key={3}'.format(category, check_url, strategy, googlePageSpeedApiKey) diff --git a/tests/lighthouse_pwa.py b/tests/lighthouse_pwa.py index 1a63aa30..ac22e974 100644 --- a/tests/lighthouse_pwa.py +++ b/tests/lighthouse_pwa.py @@ -15,7 +15,7 @@ request_timeout = config.http_request_timeout googlePageSpeedApiKey = config.googlePageSpeedApiKey -def run_test(url, strategy='mobile', category='pwa'): +def run_test(langCode, url, strategy='mobile', category='pwa'): check_url = url.strip() pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&key={2}'.format(category, check_url, googlePageSpeedApiKey) diff --git a/tests/lighthouse_seo.py b/tests/lighthouse_seo.py index 607bb88c..187a3e51 100644 --- a/tests/lighthouse_seo.py +++ b/tests/lighthouse_seo.py @@ -15,7 +15,7 @@ request_timeout = config.http_request_timeout googlePageSpeedApiKey = config.googlePageSpeedApiKey -def run_test(url, strategy='mobile', category='seo'): +def run_test(langCode, url, strategy='mobile', category='seo'): check_url = url.strip() pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&key={2}'.format(category, check_url, googlePageSpeedApiKey) diff --git a/tests/page_not_found.py b/tests/page_not_found.py index d3ded154..da52980e 100644 --- a/tests/page_not_found.py +++ b/tests/page_not_found.py @@ -14,7 +14,7 @@ ### DEFAULTS request_timeout = 
config.http_request_timeout -def run_test(url): +def run_test(langCode, url): """ Only work on a domain-level. Returns tuple with decimal for grade and string with review """ diff --git a/tests/privacy_webbkollen.py b/tests/privacy_webbkollen.py index cc592999..cec5591e 100644 --- a/tests/privacy_webbkollen.py +++ b/tests/privacy_webbkollen.py @@ -14,7 +14,7 @@ ### DEFAULTS request_timeout = config.http_request_timeout -def run_test(url): +def run_test(langCode, url): import time points = 0.0 errors = 0 diff --git a/tests/standard_files.py b/tests/standard_files.py index 4e6dd09f..6fedf3f0 100644 --- a/tests/standard_files.py +++ b/tests/standard_files.py @@ -15,7 +15,7 @@ request_timeout = config.http_request_timeout googlePageSpeedApiKey = config.googlePageSpeedApiKey -def run_test(url): +def run_test(langCode, url): """ Looking for: * robots.txt diff --git a/tests/w3c_validate_css.py b/tests/w3c_validate_css.py index 355e2314..12bdc514 100644 --- a/tests/w3c_validate_css.py +++ b/tests/w3c_validate_css.py @@ -14,7 +14,7 @@ ### DEFAULTS request_timeout = config.http_request_timeout -def run_test(url): +def run_test(langCode, url): """ Only work on a domain-level. Returns tuple with decimal for grade and string with review """ diff --git a/tests/w3c_validate_html.py b/tests/w3c_validate_html.py index db584364..14782232 100644 --- a/tests/w3c_validate_html.py +++ b/tests/w3c_validate_html.py @@ -14,7 +14,7 @@ ### DEFAULTS request_timeout = config.http_request_timeout -def run_test(url): +def run_test(langCode, url): """ Only work on a domain-level. Returns tuple with decimal for grade and string with review """ From 92a2fde9b40e73081410104300cb2b9d843323da Mon Sep 17 00:00:00 2001 From: Mattias Date: Sat, 12 Dec 2020 10:05:07 +0100 Subject: [PATCH 16/49] moved translations to separate files for page_not_found test also did some adjusting for translations --- default.py | 1 - locales/en/LC_MESSAGES/page_not_found.mo | Bin 0 -> 1046 bytes locales/en/LC_MESSAGES/page_not_found.po | 37 +++++++++++++++++++++++ locales/en/LC_MESSAGES/webperf-core.mo | Bin 4571 -> 4441 bytes locales/sv/LC_MESSAGES/page_not_found.mo | Bin 0 -> 1036 bytes locales/sv/LC_MESSAGES/page_not_found.po | 37 +++++++++++++++++++++++ locales/sv/LC_MESSAGES/webperf-core.mo | Bin 4681 -> 3827 bytes locales/sv/LC_MESSAGES/webperf-core.po | 28 ----------------- tests/page_not_found.py | 20 ++++++++---- 9 files changed, 88 insertions(+), 35 deletions(-) create mode 100644 locales/en/LC_MESSAGES/page_not_found.mo create mode 100644 locales/en/LC_MESSAGES/page_not_found.po create mode 100644 locales/sv/LC_MESSAGES/page_not_found.mo create mode 100644 locales/sv/LC_MESSAGES/page_not_found.po diff --git a/default.py b/default.py index eec58ebc..a488bd39 100644 --- a/default.py +++ b/default.py @@ -107,7 +107,6 @@ def testing(langCode, sites, test_type= TEST_ALL, show_reviews= False): print(_('TEXT_TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE')) tests.extend(testsites(langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_PAGE_NOT_FOUND): - print(_('TEXT_TEST_PAGE_NOT_FOUND')) tests.extend(testsites(langCode, sites, test_type=TEST_PAGE_NOT_FOUND, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_HTML): print(_('TEXT_TEST_HTML')) diff --git a/locales/en/LC_MESSAGES/page_not_found.mo b/locales/en/LC_MESSAGES/page_not_found.mo new file mode 100644 index 0000000000000000000000000000000000000000..af349de55fd619f7e8fccc4926758ee7bfbde423 
GIT binary patch literal 1046 zcmaJ<%Wl&^6g7_pieN#7KuDlo!KS5kaH>|SLO~75G_~5;k?ppCkgSP)o0vM|$es`) z#Akq5z^CvHtl07ed;m+1L!qP#Mm|1s=iIq3`}@M>_XgG#;12MzH2ztiuwMi|1Gj)L zz)j#AK)?^+25{|yVcZ4Z178Pkg9YFLA?%MA4dXWWC-^FO;gVt4;6?B%I00{hKY(|@ zU%^Hoo&}*N4sCH13eR)Bu;aJG1E(dtu(n~%>79n{gu9{vZSLH05uwxY_Wbk5{*h?e z{_a`i^A3R>?2Ea>BhPi(p&vAY13zrKtv~94Xx+v+k&I%@$00Fa_aY7G(5B8 znym)yy7i8|^1^jQWnUKB405T8RO&3R(@<)i$bz;`H`^2LjW|E97W@R>sy~)}HUpN! zItkTJlVU(y8s?m1$L@%;E^1Y)(#(`*g>oQn%G^JW~qhdUu047Dh(^1N*uLqPA z)Z;Xp5WfB_p^P+Uyh@3RlL+HfebNJ|@nRYmoR>FLrg5v3az-UN$JH|aPBvCCL>jP+ z(?l^X4Qk7!;$-j~%KW5&;WG~l(ix>PI+-<0F&9yu^!_H3iV!x-X@G%eQ8X@d&{Cqv zpKy%HF3%?%Vf3UEoigg4l{z~avX-&ba^`%LXDGPPQjZHvA?7Nz(Z^fs9Zd$TXx&<; qNj6Snv<4j+BaMsq|Gz5USgBdvTxk4rWX=LDb<2v%In9Q$Z2SU~Jtb-Y literal 0 HcmV?d00001 diff --git a/locales/en/LC_MESSAGES/page_not_found.po b/locales/en/LC_MESSAGES/page_not_found.po new file mode 100644 index 00000000..ef7eddea --- /dev/null +++ b/locales/en/LC_MESSAGES/page_not_found.po @@ -0,0 +1,37 @@ +# English (default). +# Copyright (C) 2020 WebPerf +# FIRST AUTHOR , 2020. +# +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"POT-Creation-Date: 2020-12-12 10:00+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: mattias \n" +"Language-Team: English \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" + + +msgid "TEXT_RUNNING_TEST" +msgstr "###############################\nRunning test: 2 - 404 (Page not Found)" + +msgid "TEXT_REVIEW_WRONG_STATUS_CODE" +msgstr "* Wrong status code. Got {0} when 404 would be correct.\n" + +msgid "TEXT_REVIEW_NO_TITLE" +msgstr "* Found no page title in the page metadata.\n" + +msgid "TEXT_REVIEW_MAIN_HEADER" +msgstr "* Found no headline (h1)\n" + +msgid "TEXT_REVIEW_NO_SWEDISH_ERROR_MSG" +msgstr "* Seems to lack text describing that an error has occurred (in Swedish).\n" + +msgid "TEXT_REVIEW_ERROR_MSG_UNDER_150" +msgstr "* Text content length is below 150 characters, indicating that the user is not being referred.\n" + +msgid "TEXT_REVIEW_NO_REMARKS" +msgstr "* No remarks" diff --git a/locales/en/LC_MESSAGES/webperf-core.mo b/locales/en/LC_MESSAGES/webperf-core.mo index e9b9a5d1c8d389894d82a09a9294d11203e8a314..a48e7879104418ee0922733743f809d97c4fb817 100644 GIT binary patch delta 673 zcmYMyPbkA-7{KvojIlA>V*ay?`MaS;3b`uix+eyw6+D?|t9reV@HgHPIKBc~^Mq`6~FPOd9<^8yO-l z!Wh~yfet(~yhhD?Kqr1;DN3eD1C*f$2eA?7u@X1YClZwtI{6GdVmZE|2UDmCRc4WD z3>hw_sp0ONfppE+Vh2(ad5T zbq7ftMO&Uo3r=Gfu3`sX;shpf68kA<7SGH&Zn7|&qcW{zAk&T<9KW@J` Q$XGF7_$|5C?U-}?4~|Yi-T(jq delta 740 zcmYMyOGq106oBEQ#s|@uiJ(-CX7CYXeAPu`)rB<}i!=~4!56~d!i^}1W}zUY(2ZSm z6^bG?AP8<0gg_S}Sa2z*%N9kDf_o`$M2i0*0}eCuopa&Nx%bTd({SCGx~*`07p?%0 zmuDtVv)AiKzDNb>4wmB)8hC1Zg*xvJJ@_B1(J07$Gl(X699uDob(lfFNJ>r!Tnzlh zT6{ngU5?y@PV|va+ODE5WDDQmKI*(*c7B6Zrl7AT9U}wLmueHGj=fnTW&_)>I@KjZIk5!LXHx#iH@-bCKJb4!eDZ e^V8YSWivUqlI%0Pd(4jXz7a@wdwl7jr|Sj%rcy)z diff --git a/locales/sv/LC_MESSAGES/page_not_found.mo b/locales/sv/LC_MESSAGES/page_not_found.mo new file mode 100644 index 0000000000000000000000000000000000000000..6331c9a6b21e467e7ab1df972f4e0a5a221121c6 GIT binary patch literal 1036 zcmaJ<%Wl&^6g7_pio^m0Wdq^@o0itWsamND1vMmYVzr4Y+i7`8)+C-J#>qIEnWRLB z%#cI_{)W0eJ8z?Lsy$p>&8ib^WP$VbQb-s5}Ewf+72<3UC{EodtiLkGQ`J zd;)F(Uw{SR8$iGh;0AE@f??bN-v?gPK~xji>9%aSvVs##&07ira6wamgQE-F|>%UVWkL1=n>9A{ll zbua9aXVzV_Qlgq$tUJpu-G-gpWlEcYWJ2|sPGyk>OzW5_+C074I^?Y(mz})gPoWmw z5$kd@;A~JN|By$q>d~e~I47(-b^C0KLf*=iQ=z%QtjQ3jHGiug4f`w>kEz{bQgQvH 
z8El&yXS&Q&hfC8I?KFx-w@4eUSm&x-aEZku^Vy_G!%3IxtXY0Ip+bH=XFMcFbW#cT zSe>TFl13uplJMCvwViOWLgTofa7}uW6^7Fj!p_9RczP@%Cb^&(UI}5h&m#qSmd$6q zYq8cW;t+Hp>y5^vNRC=EPH3@LSo%G^%g}3p$%%lB$vmFJ2ThnH#S+10%ack6)Z!|U z@fapD16YJQyicRyQ0eIjqQ%Uc8gro%w)BtrI@Zw7M6f8b@!Lo!&GbkmX_Tk!xSh~D y>ph9=O{`gKG)^U_B$bjU8Z96E|KxV2pCt?{jUSE7S%*c1VbqC*P_q}cWc&i-`zk~L literal 0 HcmV?d00001 diff --git a/locales/sv/LC_MESSAGES/page_not_found.po b/locales/sv/LC_MESSAGES/page_not_found.po new file mode 100644 index 00000000..f3a1ea8e --- /dev/null +++ b/locales/sv/LC_MESSAGES/page_not_found.po @@ -0,0 +1,37 @@ +# English (default). +# Copyright (C) 2020 WebPerf +# FIRST AUTHOR , 2020. +# +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"POT-Creation-Date: 2020-12-12 10:00+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: mattias \n" +"Language-Team: Swedish \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" + + +msgid "TEXT_RUNNING_TEST" +msgstr "###############################\nKör test: 2 - 404 (sida finns inte)" + +msgid "TEXT_REVIEW_WRONG_STATUS_CODE" +msgstr "* Fel statuskod. Fick {0} när 404 vore korrekt.\n" + +msgid "TEXT_REVIEW_NO_TITLE" +msgstr "* Hittade ingen titel på sidan\n" + +msgid "TEXT_REVIEW_MAIN_HEADER" +msgstr "* Hittade ingen huvudrubrik (h1)\n" + +msgid "TEXT_REVIEW_NO_SWEDISH_ERROR_MSG" +msgstr "* Verkar sakna text som beskriver att ett fel uppstått (på svenska).\n" + +msgid "TEXT_REVIEW_ERROR_MSG_UNDER_150" +msgstr "* Information är under 150 tecken, vilket tyder på att användaren inte vägleds vidare.\n" + +msgid "TEXT_REVIEW_NO_REMARKS" +msgstr "* Inga anmärkningar." diff --git a/locales/sv/LC_MESSAGES/webperf-core.mo b/locales/sv/LC_MESSAGES/webperf-core.mo index 8c8d1e9609056a4b1b071abb4c5a9fdfb10d3e59..47e9d02c6c63c6d83f220917fe232256939d7445 100644 GIT binary patch delta 583 zcmYk(JxD@P6u|N0^NIAKrBw9c7kxnalq89YLWiKCmcqtJT!U(B%%P#BA$&+6RC5tD zJP7w%8X|~jl12%vsT!)K{|SdK-1j>d-nr-Ab8m;=!^P*ISr$f&8Pe(E^KXOi!EPi&`L$y|{%vxR1Kw35KzX z7CvDL4O3(W(>RWsO^-3c`VL3&wQ09UWRP_R8fz)v~qONn3=>zKybc9A*UM!nGm z#<7OvikH6X4T7kxoWvo_qlJ5zz)NIHsi8j63wEK0HtQ4iV?K3=mp)fg9*ilJ?Wz|HxN+-VK8J5+hFs1nt(nuPyo zGm|KZEgVFJ+wdap5TY)AV$jXR1AGntM2X$;=7JQyNj%r`BZnSXEq zyVo~2PM|Jq4}MHH=kN(#`TFJNy3?IPFeSdnSJ+?t$YAvZs4KsRe8p3K4&nd!79Q#n zVggU0#G5#Rk8l*X(8f=33isi8oWjRw;TUbR@hf~6f5Uo~LGK13`f&z7!UfbFxrX|r zcd-NGym;M#4XEGcD87S-aR|S}VZ4siEbgNIZ#=|J7~>DZP;ACDCVF|Z?=v{S#2lW* zo%jTG0bBbT1F;MBvGkZzG-h(ftYZ|5X3?3pCY=3+tWk7E-b)MHn6n+*ux#gr#OX|~ z;7l4BlGaWrn2t4TWOLT!^M|gTxr~(C@%v;MB*WHQNlQI_^ zuY`-PDpvxzaygXMpyY*;vrFY2^SxkL*2)XMr=I6}0$s~E6OnRN zHS#N^lpHS~^W_%<=~FV$r{?X-S|#+PUkO9cSE-K1na<(mOC5i=wU5f3jX%0K_v>EG lyPSNH6@I{>+%UD8cA@Lvw#M1cq4>ydaz}fTjdMLmo&l&z3abDB diff --git a/locales/sv/LC_MESSAGES/webperf-core.po b/locales/sv/LC_MESSAGES/webperf-core.po index 459be3cb..b4898f31 100644 --- a/locales/sv/LC_MESSAGES/webperf-core.po +++ b/locales/sv/LC_MESSAGES/webperf-core.po @@ -21,29 +21,6 @@ msgstr "webbplats med adress: {0} har blivit tillagd\n" msgid "TEXT_WEBSITE_URL_DELETED" msgstr "webbplats med adress: {0} har blivit borttagen\n" -msgid "TEXT_TEST_404_REVIEW_WRONG_STATUS_CODE" -msgstr "* Fel statuskod. 
Fick {0} när 404 vore korrekt.\n" - -#: tests/page_not_found.py:55 -msgid "TEXT_TEST_404_REVIEW_NO_TITLE" -msgstr "* Hittade ingen titel på sidan\n" - -#: tests/page_not_found.py:65 -msgid "TEXT_TEST_404_REVIEW_MAIN_HEADER" -msgstr "* Hittade ingen huvudrubrik (h1)\n" - -#: tests/page_not_found.py:124 -msgid "TEXT_TEST_404_REVIEW_NO_SWEDISH_ERROR_MSG" -msgstr "* Verkar sakna text som beskriver att ett fel uppstått (på svenska).\n" - -#: tests/page_not_found.py:131 -msgid "TEXT_TEST_404_REVIEW_ERROR_MSG_UNDER_150" -msgstr "* Information är under 150 tecken, vilket tyder på att användaren inte vägleds vidare.\n" - -#: tests/page_not_found.py:134 -msgid "TEXT_TEST_REVIEW_NO_REMARKS" -msgstr "* Inga anmärkningar." - #: default.py:24 msgid "TEXT_TEST_START_HEADER" msgstr "###############################################" @@ -92,11 +69,6 @@ msgstr "###############################\nKör test: 8 - Google Lighthouse progre msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE" msgstr "###############################\nKör test: 5 - Google Lighthouse god praxis" - -#: default.py:107 -msgid "TEXT_TEST_PAGE_NOT_FOUND" -msgstr "###############################\nKör test: 2 - 404 (sida finns inte)" - #: default.py:111 msgid "TEXT_TEST_HTML" msgstr "###############################\nKör test: 6 - HTML" diff --git a/tests/page_not_found.py b/tests/page_not_found.py index da52980e..b88c4d04 100644 --- a/tests/page_not_found.py +++ b/tests/page_not_found.py @@ -10,6 +10,8 @@ from bs4 import BeautifulSoup import config from tests.utils import * +import gettext +_ = gettext.gettext ### DEFAULTS request_timeout = config.http_request_timeout @@ -23,6 +25,12 @@ def run_test(langCode, url): review = '' result_dict = {} + language = gettext.translation('page_not_found', localedir='locales', languages=[langCode]) + language.install() + _ = language.gettext + + print(_('TEXT_RUNNING_TEST')) + ## kollar koden o = urllib.parse.urlparse(url) url = '{0}://{1}/{3}/{2}'.format(o.scheme, o.netloc, 'finns-det-en-sida/pa-den-har-adressen/testanrop/', get_guid(5)) @@ -32,7 +40,7 @@ def run_test(langCode, url): if code == 404: points += 2.0 else: - review = review + _('TEXT_TEST_404_REVIEW_WRONG_STATUS_CODE').format(request.status_code) #'* Fel statuskod. 
Fick {0} när 404 vore korrekt.\n'.format(request.status_code)
+        review = review + _('TEXT_REVIEW_WRONG_STATUS_CODE').format(request.status_code)
 
     result_dict['status_code'] = code
 
@@ -52,7 +60,7 @@ def run_test(langCode, url):
         if title:
             result_dict['page_title'] = title.string
         else:
-            review = review + _('TEXT_TEST_404_REVIEW_NO_TITLE') #'* hittade ingen titel på sidan\n'
+            review = review + _('TEXT_REVIEW_NO_TITLE')
     except:
         print('Error getting page title!\nMessage:\n{0}'.format(sys.exc_info()[0]))
 
@@ -62,7 +70,7 @@ def run_test(langCode, url):
         if h1:
             result_dict['h1'] = h1.string
         else:
-            review = review + _('TEXT_TEST_404_REVIEW_MAIN_HEADER') #'* hittade ingen huvud rubrik (h1)\n'
+            review = review + _('TEXT_REVIEW_MAIN_HEADER')
     except:
         print('Error getting H1!\nMessage:\n{0}'.format(sys.exc_info()[0]))
 
@@ -121,17 +129,17 @@ def run_test(langCode, url):
 
     if found_match == False:
-        review = review + _('TEXT_TEST_404_REVIEW_NO_SWEDISH_ERROR_MSG') #'* Verkar sakna text som beskriver att ett fel uppstått (på svenska).\n'
+        review = review + _('TEXT_REVIEW_NO_SWEDISH_ERROR_MSG')
 
     ## hur långt är inehållet
     soup = BeautifulSoup(request.text, 'html.parser')
     if len(soup.get_text()) > 150:
         points += 1.5
     else:
-        review = review + _('TEXT_TEST_404_REVIEW_ERROR_MSG_UNDER_150') #'* Information är under 150 tecken, vilket tyder på att användaren inte vägleds vidare.\n'
+        review = review + _('TEXT_REVIEW_ERROR_MSG_UNDER_150') #'* Information är under 150 tecken, vilket tyder på att användaren inte vägleds vidare.\n'
 
     if len(review) == 0:
-        review = _('TEXT_TEST_REVIEW_NO_REMARKS')
+        review = _('TEXT_REVIEW_NO_REMARKS')
 
     if points == 0:
         points = 1.0
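The diff above establishes the translation pattern every test module in this series adopts: one gettext domain per test, resolved against locales/<langCode>/LC_MESSAGES/. A minimal standalone sketch of that lookup (hypothetical demo code, not part of any patch; it assumes the compiled page_not_found.mo for 'sv' from the patch above exists):

    # Sketch: the per-test translation bootstrap used throughout this patch series.
    import gettext

    langCode = 'sv'  # assumption for the demo; any language folder under locales/ works
    language = gettext.translation('page_not_found', localedir='locales', languages=[langCode])
    language.install()
    _ = language.gettext

    print(_('TEXT_RUNNING_TEST'))                          # localized test banner
    print(_('TEXT_REVIEW_WRONG_STATUS_CODE').format(200))  # "* Fel statuskod. Fick 200 ..."

Note that gettext.translation raises FileNotFoundError when no matching .mo file exists (unless fallback=True is passed), which is why each catalog in these patches ships together with its compiled binary.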
From 819e5ea38bbc3e6f06ba275bf073d09248a1931d Mon Sep 17 00:00:00 2001
From: Mattias 
Date: Sat, 12 Dec 2020 10:30:14 +0100
Subject: [PATCH 17/49] translated html validator and changed name convention
 for it

---
 default.py                                    |   3 +-
 locales/en/LC_MESSAGES/html_validator_w3c.mo  | Bin 0 -> 944 bytes
 locales/en/LC_MESSAGES/html_validator_w3c.po  |  34 ++++
 locales/en/LC_MESSAGES/webperf-core.mo        | Bin 4441 -> 3623 bytes
 locales/en/LC_MESSAGES/webperf-core.po        |  32 ----
 .../frontend_quality_yellow_lab_tools.po      |   2 +-
 locales/sv/LC_MESSAGES/html_validator_w3c.mo  | Bin 0 -> 939 bytes
 locales/sv/LC_MESSAGES/html_validator_w3c.po  |  34 ++++
 locales/sv/LC_MESSAGES/page_not_found.po      |   2 +-
 locales/sv/LC_MESSAGES/webperf-core.po        |   4 -
 locales/webperf-core.pot                      | 156 ------------------
 ...validate_html.py => html_validator_w3c.py} |  18 +-
 12 files changed, 84 insertions(+), 201 deletions(-)
 create mode 100644 locales/en/LC_MESSAGES/html_validator_w3c.mo
 create mode 100644 locales/en/LC_MESSAGES/html_validator_w3c.po
 create mode 100644 locales/sv/LC_MESSAGES/html_validator_w3c.mo
 create mode 100644 locales/sv/LC_MESSAGES/html_validator_w3c.po
 delete mode 100644 locales/webperf-core.pot
 rename tests/{w3c_validate_html.py => html_validator_w3c.py} (72%)

diff --git a/default.py b/default.py
index a488bd39..af23c288 100644
--- a/default.py
+++ b/default.py
@@ -37,7 +37,7 @@ def testsites(langCode, sites, test_type=None, show_reviews=False, only_test_unt
     if test_type == TEST_PAGE_NOT_FOUND:
         from tests.page_not_found import run_test
     elif test_type == TEST_HTML:
-        from tests.w3c_validate_html import run_test
+        from tests.html_validator_w3c import run_test
     elif test_type == TEST_CSS:
         from tests.w3c_validate_css import run_test
     elif test_type == TEST_WEBBKOLL:
@@ -109,7 +109,6 @@ def testing(langCode, sites, test_type= TEST_ALL, show_reviews= False):
     if (test_type == TEST_ALL or test_type == TEST_PAGE_NOT_FOUND):
         tests.extend(testsites(langCode, sites, test_type=TEST_PAGE_NOT_FOUND, show_reviews=show_reviews))
     if (test_type == TEST_ALL or test_type == TEST_HTML):
-        print(_('TEXT_TEST_HTML'))
         tests.extend(testsites(langCode, sites, test_type=TEST_HTML, show_reviews=show_reviews))
     if (test_type == TEST_ALL or test_type == TEST_CSS):
         print(_('TEXT_TEST_CSS'))
diff --git a/locales/en/LC_MESSAGES/html_validator_w3c.mo b/locales/en/LC_MESSAGES/html_validator_w3c.mo
new file mode 100644
index 0000000000000000000000000000000000000000..c0601d6b6c69fa67cd6d9c44b298c27d227ea39b
GIT binary patch
[base85 binary patch data omitted]

diff --git a/locales/en/LC_MESSAGES/html_validator_w3c.po b/locales/en/LC_MESSAGES/html_validator_w3c.po
new file mode 100644
--- /dev/null
+++ b/locales/en/LC_MESSAGES/html_validator_w3c.po
@@ -0,0 +1,34 @@
+# English (default).
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR , 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 10:15+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: mattias \n"
+"Language-Team: English \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nRunning test: 6 - HTML"
+
+msgid "TEXT_REVIEW_HTML_VERY_GOOD"
+msgstr "* No errors in the HTML code.\n"
+
+msgid "TEXT_REVIEW_HTML_IS_GOOD"
+msgstr "* The tested page has {0} errors in its HTML code.\n"
+
+msgid "TEXT_REVIEW_HTML_IS_OK"
+msgstr "* The tested page has {0} errors in its HTML code.\n"
+
+msgid "TEXT_REVIEW_HTML_IS_BAD"
+msgstr "* The tested page has {0} errors in its HTML code. It is not so good.\n"
+
+msgid "TEXT_REVIEW_HTML_IS_VERY_BAD"
+msgstr "* The tested page has lots of errors in its HTML code. A total of {0}. \n"
diff --git a/locales/en/LC_MESSAGES/webperf-core.mo b/locales/en/LC_MESSAGES/webperf-core.mo
index a48e7879104418ee0922733743f809d97c4fb817..470ec649a1bc3fad32453bf638cda7fb9010ea2a 100644
GIT binary patch
[base85 binary patch data omitted]

diff --git a/locales/en/LC_MESSAGES/webperf-core.po b/locales/en/LC_MESSAGES/webperf-core.po
index 45be99d1..9c137e21 100644
--- a/locales/en/LC_MESSAGES/webperf-core.po
+++ b/locales/en/LC_MESSAGES/webperf-core.po
@@ -21,30 +21,6 @@ msgstr "website with url: {0} has been added\n"
 msgid "TEXT_WEBSITE_URL_DELETED"
 msgstr "website with url: {0} has been deleted\n"
 
-#: tests/page_not_found.py:35
-msgid "TEXT_TEST_404_REVIEW_WRONG_STATUS_CODE"
-msgstr "* Wrong status code. 
Got {0} when 404 would be correct.\n"
-
-#: tests/page_not_found.py:55
-msgid "TEXT_TEST_404_REVIEW_NO_TITLE"
-msgstr "* Found no page title in the page metadata.\n"
-
-#: tests/page_not_found.py:65
-msgid "TEXT_TEST_404_REVIEW_MAIN_HEADER"
-msgstr "* Found no headline (h1)\n"
-
-#: tests/page_not_found.py:124
-msgid "TEXT_TEST_404_REVIEW_NO_SWEDISH_ERROR_MSG"
-msgstr "* Seems to lack text describing that an error has occurred (in Swedish).\n"
-
-#: tests/page_not_found.py:131
-msgid "TEXT_TEST_404_REVIEW_ERROR_MSG_UNDER_150"
-msgstr "* Text content length is below 150 characters, indicating that the user is not being referred.\n"
-
-#: tests/page_not_found.py:134
-msgid "TEXT_TEST_REVIEW_NO_REMARKS"
-msgstr "* No remarks"
-
 #: default.py:24
 msgid "TEXT_TEST_START_HEADER"
 msgstr "###############################################"
@@ -92,11 +69,6 @@ msgstr "###############################\nRunning test: 8 - Google Lighthouse PWA
 msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE"
 msgstr "###############################\nRunning test: 5 - Google Lighthouse Best Practice"
 
-#: default.py:107
-msgid "TEXT_TEST_PAGE_NOT_FOUND"
-msgstr "###############################\nRunning test: 2 - 404 (Page not Found)"
-
-#: default.py:111
-msgid "TEXT_TEST_HTML"
-msgstr "###############################\nRunning test: 6 - HTML"
-
 #: default.py:114
 msgid "TEXT_TEST_CSS"
 msgstr "###############################\nRunning test: 7 - CSS"
diff --git a/locales/sv/LC_MESSAGES/frontend_quality_yellow_lab_tools.po b/locales/sv/LC_MESSAGES/frontend_quality_yellow_lab_tools.po
index 11e2317f..3cc31d17 100644
--- a/locales/sv/LC_MESSAGES/frontend_quality_yellow_lab_tools.po
+++ b/locales/sv/LC_MESSAGES/frontend_quality_yellow_lab_tools.po
@@ -1,4 +1,4 @@
-# English (default).
+# Swedish
 # Copyright (C) 2020 WebPerf
 # FIRST AUTHOR , 2020.
 #
diff --git a/locales/sv/LC_MESSAGES/html_validator_w3c.mo b/locales/sv/LC_MESSAGES/html_validator_w3c.mo
new file mode 100644
index 0000000000000000000000000000000000000000..b1be8fd7bcc3243375ead1c09922c1f4557d0896
GIT binary patch
[base85 binary patch data omitted]

diff --git a/locales/sv/LC_MESSAGES/html_validator_w3c.po b/locales/sv/LC_MESSAGES/html_validator_w3c.po
new file mode 100644
index 00000000..f77182c1
--- /dev/null
+++ b/locales/sv/LC_MESSAGES/html_validator_w3c.po
@@ -0,0 +1,34 @@
+# Swedish
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR , 2020. 
+# +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"POT-Creation-Date: 2020-12-12 10:15+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: mattias \n" +"Language-Team: Swedish \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" + + +msgid "TEXT_RUNNING_TEST" +msgstr "###############################\nKör test: 6 - HTML" + +msgid "TEXT_REVIEW_HTML_VERY_GOOD" +msgstr "* Inga fel i HTML-koden.\n" + +msgid "TEXT_REVIEW_HTML_IS_GOOD" +msgstr "* Den testade sidan har {0} st fel i sin HTML-kod.\n" + +msgid "TEXT_REVIEW_HTML_IS_OK" +msgstr "* Den testade sidan har {0} st fel i sin HTML-kod.\n" + +msgid "TEXT_REVIEW_HTML_IS_BAD" +msgstr "* Den testade sidan har {0} st fel i sin HTML-kod. Det är inte så bra.\n" + +msgid "TEXT_REVIEW_HTML_IS_VERY_BAD" +msgstr "* Den testade sidan har massor med fel i sin HTML-kod. Hela {0} st. \n" diff --git a/locales/sv/LC_MESSAGES/page_not_found.po b/locales/sv/LC_MESSAGES/page_not_found.po index f3a1ea8e..dc30d560 100644 --- a/locales/sv/LC_MESSAGES/page_not_found.po +++ b/locales/sv/LC_MESSAGES/page_not_found.po @@ -1,4 +1,4 @@ -# English (default). +# Swedish # Copyright (C) 2020 WebPerf # FIRST AUTHOR , 2020. # diff --git a/locales/sv/LC_MESSAGES/webperf-core.po b/locales/sv/LC_MESSAGES/webperf-core.po index b4898f31..224ac6bf 100644 --- a/locales/sv/LC_MESSAGES/webperf-core.po +++ b/locales/sv/LC_MESSAGES/webperf-core.po @@ -69,10 +69,6 @@ msgstr "###############################\nKör test: 8 - Google Lighthouse progre msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE" msgstr "###############################\nKör test: 5 - Google Lighthouse god praxis" -#: default.py:111 -msgid "TEXT_TEST_HTML" -msgstr "###############################\nKör test: 6 - HTML" - #: default.py:114 msgid "TEXT_TEST_CSS" msgstr "###############################\nKör test: 7 - CSS" diff --git a/locales/webperf-core.pot b/locales/webperf-core.pot deleted file mode 100644 index 0c3fc9b5..00000000 --- a/locales/webperf-core.pot +++ /dev/null @@ -1,156 +0,0 @@ -# SOME DESCRIPTIVE TITLE. -# Copyright (C) YEAR ORGANIZATION -# FIRST AUTHOR , YEAR. 
-# -msgid "" -msgstr "" -"Project-Id-Version: PACKAGE VERSION\n" -"POT-Creation-Date: 2020-05-23 17:29+0200\n" -"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" -"Last-Translator: FULL NAME \n" -"Language-Team: LANGUAGE \n" -"MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=UTF-8\n" -"Content-Transfer-Encoding: 8bit\n" -"Generated-By: pygettext.py 1.5\n" - - -#: tests/page_not_found.py:35 -msgid "TEXT_TEST_404_REVIEW_WRONG_STATUS_CODE" -msgstr "" - -#: tests/page_not_found.py:55 -msgid "TEXT_TEST_404_REVIEW_NO_TITLE" -msgstr "" - -#: tests/page_not_found.py:65 -msgid "TEXT_TEST_404_REVIEW_MAIN_HEADER" -msgstr "" - -#: tests/page_not_found.py:124 -msgid "TEXT_TEST_404_REVIEW_NO_SWEDISH_ERROR_MSG" -msgstr "" - -#: tests/page_not_found.py:131 -msgid "TEXT_TEST_404_REVIEW_ERROR_MSG_UNDER_150" -msgstr "" - -#: tests/page_not_found.py:134 -msgid "TEXT_TEST_REVIEW_NO_REMARKS" -msgstr "" - -#: default.py:24 -msgid "TEXT_TEST_START_HEADER" -msgstr "" - -#: default.py:28 -msgid "TEXT_TESTING_NUMBER_OF_SITES" -msgstr "" - -#: default.py:33 -msgid "TEXT_TESTING_SITE" -msgstr "" - -#: default.py:61 -msgid "TEXT_SITE_RATING" -msgstr "" - -#: default.py:63 -msgid "TEXT_SITE_REVIEW" -msgstr "" - -#: default.py:81 -msgid "TEXT_EXCEPTION" -msgstr "" - -#: default.py:89 -msgid "TEXT_TESTING_START_HEADER" -msgstr "" - -#: default.py:93 -msgid "TEST_GOOGLE_LIGHTHOUSE" -msgstr "" - -#: default.py:96 -msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_A11Y" -msgstr "" - -#: default.py:99 -msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_SEO" -msgstr "" - -#: default.py:102 -msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_PWA" -msgstr "" - -#: default.py:105 -msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE" -msgstr "" - -#: default.py:107 -msgid "TEXT_TEST_PAGE_NOT_FOUND" -msgstr "" - -#: default.py:111 -msgid "TEXT_TEST_HTML" -msgstr "" - -#: default.py:114 -msgid "TEXT_TEST_CSS" -msgstr "" - -#: default.py:117 -msgid "TEXT_TEST_WEBBKOLL" -msgstr "" - -#: default.py:120 -msgid "TEXT_TEST_STANDARD_FILES" -msgstr "" - -#: default.py:127 -msgid "TEXT_TEST_VALID_ARGUMENTS" -msgstr "" - -#: default.py:128 -msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE" -msgstr "" - -#: default.py:129 -msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_SEO" -msgstr "" - -#: default.py:130 -msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_A11Y" -msgstr "" - -#: default.py:131 -msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_PWA" -msgstr "" - -#: default.py:132 -msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_BEST_PRACTICE" -msgstr "" - -#: default.py:133 -msgid "TEXT_TEST_VALID_ARGUMENTS_PAGE_NOT_FOUND" -msgstr "" - -#: default.py:134 -msgid "TEXT_TEST_VALID_ARGUMENTS_HTML" -msgstr "" - -#: default.py:135 -msgid "TEXT_TEST_VALID_ARGUMENTS_CSS" -msgstr "" - -#: default.py:136 -msgid "TEXT_TEST_VALID_ARGUMENTS_WEBBKOLL" -msgstr "" - -#: default.py:137 -msgid "TEXT_TEST_VALID_ARGUMENTS_STANDARD_FILES" -msgstr "" - -#: default.py:248 default.py:279 -msgid "TEXT_COMMAND_USAGE" -msgstr "" diff --git a/tests/w3c_validate_html.py b/tests/html_validator_w3c.py similarity index 72% rename from tests/w3c_validate_html.py rename to tests/html_validator_w3c.py index 14782232..357950e8 100644 --- a/tests/w3c_validate_html.py +++ b/tests/html_validator_w3c.py @@ -10,6 +10,8 @@ from bs4 import BeautifulSoup import config from tests.utils import * +import gettext +_ = gettext.gettext ### DEFAULTS request_timeout = config.http_request_timeout @@ -22,6 +24,12 @@ def run_test(langCode, url): points = 0.0 review = '' + language = gettext.translation('html_validator_w3c', 
localedir='locales', languages=[langCode])
+    language.install()
+    _ = language.gettext
+
+    print(_('TEXT_RUNNING_TEST'))
+
     ## kollar koden
     try:
         url = 'https://validator.w3.org/nu/?doc={0}'.format(url.replace('/', '%2F').replace(':', '%3A'))
@@ -38,18 +46,18 @@ def run_test(langCode, url):
 
     if errors == 0:
         points = 5.0
-        review = '* Inga fel i HTML-koden.\n'
+        review = _('TEXT_REVIEW_HTML_VERY_GOOD')
     elif errors <= 5:
         points = 4.0
-        review = '* Den testade sidan har {0} st fel i sin HTML-kod.\n'.format(errors)
+        review = _('TEXT_REVIEW_HTML_IS_GOOD').format(errors)
     elif errors <= 15:
         points = 3.0
-        review = '* Den testade sidan har {0} st fel i sin HTML-kod.\n'.format(errors)
+        review = _('TEXT_REVIEW_HTML_IS_OK').format(errors)
     elif errors <= 30:
         points = 2.0
-        review = '* Den testade sidan har {0} st fel i sin HTML-kod. Det är inte så bra.\n'.format(errors)
+        review = _('TEXT_REVIEW_HTML_IS_BAD').format(errors)
     elif errors > 30:
         points = 1.0
-        review = '* Den testade sidan har massor med fel i sin HTML-kod. Hela {0} st. \n'.format(errors)
+        review = _('TEXT_REVIEW_HTML_IS_VERY_BAD').format(errors)
 
     return (points, review)
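The binary .mo blobs committed alongside the .po files in these patches are the compiled catalogs that gettext.translation actually loads at runtime. A hedged sketch of regenerating them, assuming GNU gettext's msgfmt is installed and on PATH (the repository itself does not ship this helper; it is illustration only):

    # Hypothetical helper: compile every .po catalog under locales/ to its .mo twin.
    import pathlib
    import subprocess

    for po in pathlib.Path('locales').glob('*/LC_MESSAGES/*.po'):
        mo = po.with_suffix('.mo')
        # msgfmt is the GNU gettext compiler; it must be installed separately.
        subprocess.run(['msgfmt', str(po), '-o', str(mo)], check=True)
        print('compiled', po, '->', mo)

The "Generated-By: pygettext.py 1.5" headers indicate the templates were extracted with pygettext.py (shipped with CPython under Tools/i18n), after which each language's .po is translated by hand and compiled as above.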
From abd48035a0de31ef85fefc28a77cbdaa86a6b72b Mon Sep 17 00:00:00 2001
From: Mattias 
Date: Sat, 12 Dec 2020 10:36:35 +0100
Subject: [PATCH 18/49] some dividers added

---
 .../frontend_quality_yellow_lab_tools.mo      | Bin 1727 -> 1759 bytes
 .../frontend_quality_yellow_lab_tools.po      |   2 +-
 locales/en/LC_MESSAGES/html_validator_w3c.mo  | Bin 944 -> 979 bytes
 locales/en/LC_MESSAGES/html_validator_w3c.po  |   2 +-
 locales/en/LC_MESSAGES/page_not_found.mo      | Bin 1046 -> 1078 bytes
 locales/en/LC_MESSAGES/page_not_found.po      |   2 +-
 locales/en/LC_MESSAGES/yellow_lab_tools.mo    | Bin 1720 -> 0 bytes
 .../frontend_quality_yellow_lab_tools.mo      | Bin 1770 -> 1802 bytes
 .../frontend_quality_yellow_lab_tools.po      |   2 +-
 locales/sv/LC_MESSAGES/html_validator_w3c.mo  | Bin 939 -> 971 bytes
 locales/sv/LC_MESSAGES/html_validator_w3c.po  |   2 +-
 locales/sv/LC_MESSAGES/page_not_found.mo      | Bin 1036 -> 1068 bytes
 locales/sv/LC_MESSAGES/page_not_found.po      |   2 +-
 locales/sv/LC_MESSAGES/webperf-core.mo        | Bin 3827 -> 3744 bytes
 14 files changed, 6 insertions(+), 6 deletions(-)
 delete mode 100644 locales/en/LC_MESSAGES/yellow_lab_tools.mo

diff --git a/locales/en/LC_MESSAGES/frontend_quality_yellow_lab_tools.mo b/locales/en/LC_MESSAGES/frontend_quality_yellow_lab_tools.mo
index bb7c826ebb59c01aca9ef18c9e4fdb756857a6c2..424cb27b67a475b9c7b1cb57a3a6d54b47c7d816 100644
GIT binary patch
[base85 binary patch data omitted]

[matching one-line divider additions to the remaining en/sv .po catalogs listed above, their recompiled .mo files, and the deletion of locales/en/LC_MESSAGES/yellow_lab_tools.mo omitted]

diff --git a/locales/sv/LC_MESSAGES/page_not_found.po b/locales/sv/LC_MESSAGES/page_not_found.po
index dc30d560..cd76808d 100644
--- a/locales/sv/LC_MESSAGES/page_not_found.po
+++ b/locales/sv/LC_MESSAGES/page_not_found.po
@@ -16,7 +16,7 @@ msgstr ""
 
 msgid "TEXT_RUNNING_TEST"
-msgstr "###############################\nKör test: 2 - 404 (sida finns inte)"
+msgstr "###############################\nKör test: 2 - 404 (sida finns inte)\n###############################"
 
 msgid "TEXT_REVIEW_WRONG_STATUS_CODE"
 msgstr "* Fel statuskod. Fick {0} när 404 vore korrekt.\n"
diff --git a/locales/sv/LC_MESSAGES/webperf-core.mo b/locales/sv/LC_MESSAGES/webperf-core.mo
index 47e9d02c6c63c6d83f220917fe232256939d7445..9189a2bbf2181d2fbc760becd7dc7934b3270b9c 100644
GIT binary patch
[base85 binary patch data omitted]

From fbc75b287638f3930e61798d599ed636f5fd19b3 Mon Sep 17 00:00:00 2001
From: Mattias 
Date: Sat, 12 Dec 2020 10:43:03 +0100
Subject: [PATCH 19/49] changing test names

new consistent name format is: <what_is_tested>_<provider>
If there is no provider and we have written the test ourself it looks like
this: <what_is_tested>
---
 default.py                                            | 12 ++++++------
 tests/{lighthouse_a11y.py => a11y_lighthouse.py}      |  0
 ..._best_practice.py => best_practice_lighthouse.py}  |  0
 tests/{w3c_validate_css.py => css_validator_w3c.py}   |  0
 tests/{lighthouse.py => performance_lighthouse.py}    |  0
 tests/{lighthouse_pwa.py => pwa_lighthouse.py}        |  0
 tests/{lighthouse_seo.py => seo_lighthouse.py}        |  0
 7 files changed, 6 insertions(+), 6 deletions(-)
 rename tests/{lighthouse_a11y.py => a11y_lighthouse.py} (100%)
 rename tests/{lighthouse_best_practice.py => best_practice_lighthouse.py} (100%)
 rename tests/{w3c_validate_css.py => css_validator_w3c.py} (100%)
 rename tests/{lighthouse.py => performance_lighthouse.py} (100%)
 rename tests/{lighthouse_pwa.py => pwa_lighthouse.py} (100%)
 rename tests/{lighthouse_seo.py => seo_lighthouse.py} (100%)

diff --git a/default.py b/default.py
index af23c288..f604c9b4 100644
--- a/default.py
+++ b/default.py
@@ -39,21 +39,21 @@ def testsites(langCode, sites, test_type=None, show_reviews=False, only_test_unt
     elif test_type == TEST_HTML:
         from tests.html_validator_w3c import run_test
     elif test_type == TEST_CSS:
-        from tests.w3c_validate_css import run_test
+        from tests.css_validator_w3c import run_test
     elif test_type == TEST_WEBBKOLL:
         from tests.privacy_webbkollen import run_test
     elif test_type == TEST_GOOGLE_LIGHTHOUSE:
-        from tests.lighthouse import run_test
+        from 
tests.performance_lighthouse import run_test elif test_type == TEST_GOOGLE_LIGHTHOUSE_SEO: - from tests.lighthouse_seo import run_test + from tests.seo_lighthouse import run_test elif test_type == TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE: - from tests.lighthouse_best_practice import run_test + from tests.best_practice_lighthouse import run_test elif test_type == TEST_GOOGLE_LIGHTHOUSE_PWA: - from tests.lighthouse_pwa import run_test + from tests.pwa_lighthouse import run_test elif test_type == TEST_STANDARD_FILES: from tests.standard_files import run_test elif test_type == TEST_GOOGLE_LIGHTHOUSE_A11Y: - from tests.lighthouse_a11y import run_test + from tests.a11y_lighthouse import run_test elif test_type == TEST_YELLOW_LAB_TOOLS: from tests.frontend_quality_yellow_lab_tools import run_test diff --git a/tests/lighthouse_a11y.py b/tests/a11y_lighthouse.py similarity index 100% rename from tests/lighthouse_a11y.py rename to tests/a11y_lighthouse.py diff --git a/tests/lighthouse_best_practice.py b/tests/best_practice_lighthouse.py similarity index 100% rename from tests/lighthouse_best_practice.py rename to tests/best_practice_lighthouse.py diff --git a/tests/w3c_validate_css.py b/tests/css_validator_w3c.py similarity index 100% rename from tests/w3c_validate_css.py rename to tests/css_validator_w3c.py diff --git a/tests/lighthouse.py b/tests/performance_lighthouse.py similarity index 100% rename from tests/lighthouse.py rename to tests/performance_lighthouse.py diff --git a/tests/lighthouse_pwa.py b/tests/pwa_lighthouse.py similarity index 100% rename from tests/lighthouse_pwa.py rename to tests/pwa_lighthouse.py diff --git a/tests/lighthouse_seo.py b/tests/seo_lighthouse.py similarity index 100% rename from tests/lighthouse_seo.py rename to tests/seo_lighthouse.py From 6d3c6b2fec45fa43f81577050a90a24f8977fea3 Mon Sep 17 00:00:00 2001 From: Mattias Date: Sat, 12 Dec 2020 10:54:23 +0100 Subject: [PATCH 20/49] translated css_validator_w3c --- default.py | 1 - locales/en/LC_MESSAGES/css_validator_w3c.mo | Bin 0 -> 968 bytes locales/en/LC_MESSAGES/css_validator_w3c.po | 34 ++++++++++++++++++++ locales/en/LC_MESSAGES/webperf-core.mo | Bin 3623 -> 3539 bytes locales/en/LC_MESSAGES/webperf-core.po | 4 --- locales/sv/LC_MESSAGES/css_validator_w3c.mo | Bin 0 -> 960 bytes locales/sv/LC_MESSAGES/css_validator_w3c.po | 34 ++++++++++++++++++++ locales/sv/LC_MESSAGES/webperf-core.mo | Bin 3744 -> 3663 bytes locales/sv/LC_MESSAGES/webperf-core.po | 4 --- tests/css_validator_w3c.py | 18 ++++++++--- 10 files changed, 81 insertions(+), 14 deletions(-) create mode 100644 locales/en/LC_MESSAGES/css_validator_w3c.mo create mode 100644 locales/en/LC_MESSAGES/css_validator_w3c.po create mode 100644 locales/sv/LC_MESSAGES/css_validator_w3c.mo create mode 100644 locales/sv/LC_MESSAGES/css_validator_w3c.po diff --git a/default.py b/default.py index f604c9b4..ce92f17a 100644 --- a/default.py +++ b/default.py @@ -111,7 +111,6 @@ def testing(langCode, sites, test_type= TEST_ALL, show_reviews= False): if (test_type == TEST_ALL or test_type == TEST_HTML): tests.extend(testsites(langCode, sites, test_type=TEST_HTML, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_CSS): - print(_('TEXT_TEST_CSS')) tests.extend(testsites(langCode, sites, test_type=TEST_CSS, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_WEBBKOLL): print(_('TEXT_TEST_WEBBKOLL')) diff --git a/locales/en/LC_MESSAGES/css_validator_w3c.mo b/locales/en/LC_MESSAGES/css_validator_w3c.mo new file mode 100644 index 
0000000000000000000000000000000000000000..a32ec522d65289b6d41dd08f6c33aa25bc262b93
GIT binary patch
[base85 binary patch data omitted]

diff --git a/locales/en/LC_MESSAGES/css_validator_w3c.po b/locales/en/LC_MESSAGES/css_validator_w3c.po
new file mode 100644
index 00000000..bc12d1e2
--- /dev/null
+++ b/locales/en/LC_MESSAGES/css_validator_w3c.po
@@ -0,0 +1,34 @@
+# English (default).
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR , 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 10:45+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: mattias \n"
+"Language-Team: English \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nRunning test: 7 - CSS\n###############################"
+
+msgid "TEXT_REVIEW_CSS_VERY_GOOD"
+msgstr "* No errors in the CSS code.\n"
+
+msgid "TEXT_REVIEW_CSS_IS_GOOD"
+msgstr "* The tested page has {0} errors in its CSS code.\n"
+
+msgid "TEXT_REVIEW_CSS_IS_OK"
+msgstr "* The tested page has {0} errors in its CSS code.\n"
+
+msgid "TEXT_REVIEW_CSS_IS_BAD"
+msgstr "* The tested page has {0} errors in its CSS code. It is not so good.\n"
+
+msgid "TEXT_REVIEW_CSS_IS_VERY_BAD"
+msgstr "* The tested page has lots of errors in its CSS code. A total of {0}. \n"
diff --git a/locales/en/LC_MESSAGES/webperf-core.mo b/locales/en/LC_MESSAGES/webperf-core.mo
index 470ec649a1bc3fad32453bf638cda7fb9010ea2a..77c4667b3fd5d8a9961ab74052ebc20c93f236d8 100644
GIT binary patch
[base85 binary patch data omitted]

[removal of the TEXT_TEST_CSS entry from locales/en/LC_MESSAGES/webperf-core.po and the new binary locales/sv/LC_MESSAGES/css_validator_w3c.mo omitted]

diff --git a/locales/sv/LC_MESSAGES/css_validator_w3c.po b/locales/sv/LC_MESSAGES/css_validator_w3c.po
new file mode 100644
--- /dev/null
+++ b/locales/sv/LC_MESSAGES/css_validator_w3c.po
@@ -0,0 +1,34 @@
+# Swedish
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR , 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 10:45+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: mattias \n"
+"Language-Team: Swedish \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nKör test: 7 - CSS\n###############################"
+
+msgid "TEXT_REVIEW_CSS_VERY_GOOD"
+msgstr "* Inga fel i CSS-koden.\n"
+
+msgid "TEXT_REVIEW_CSS_IS_GOOD"
+msgstr "* Den testade sidan har {0} st fel i sin CSS-kod.\n"
+
+msgid "TEXT_REVIEW_CSS_IS_OK"
+msgstr "* Den testade sidan har {0} st fel i sin CSS-kod.\n"
+
+msgid "TEXT_REVIEW_CSS_IS_BAD"
+msgstr "* Den testade sidan har {0} st fel i sin CSS-kod. Det är inte så bra.\n"
+
+msgid "TEXT_REVIEW_CSS_IS_VERY_BAD"
+msgstr "* Den testade sidan har massor med fel i sin CSS-kod. Hela {0} st. \n"
diff --git a/locales/sv/LC_MESSAGES/webperf-core.mo b/locales/sv/LC_MESSAGES/webperf-core.mo
index 9189a2bbf2181d2fbc760becd7dc7934b3270b9c..e5b1a078fe0ade7d93bf704713bc6e13bc2a68d4 100644
GIT binary patch
[base85 binary patch data omitted]

[matching removal of the TEXT_TEST_CSS entry from locales/sv/LC_MESSAGES/webperf-core.po omitted]

diff --git a/tests/css_validator_w3c.py b/tests/css_validator_w3c.py
[earlier hunks add the same gettext setup as html_validator_w3c.py above; only the final hunk remains]
     elif errors > 20:
         points = 1.0
-        review = '* Den testade sidan har massor med fel i sin CSS-kod. Hela {0} st. \n'.format(errors)
+        review = _('TEXT_REVIEW_CSS_IS_VERY_BAD').format(errors)
 
     return (points, review)
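Both W3C validator tests map a raw error count onto webperf-core's 1.0-5.0 points scale before picking the matching TEXT_REVIEW_* string. Condensed into one function for reference (a sketch using the HTML test's thresholds from PATCH 17; the CSS test in this patch draws its bottom boundary at 20 errors instead of 30):

    # Sketch only: the threshold ladder of html_validator_w3c.py as a pure function.
    def points_from_errors(errors: int) -> float:
        if errors == 0:
            return 5.0   # TEXT_REVIEW_HTML_VERY_GOOD
        if errors <= 5:
            return 4.0   # TEXT_REVIEW_HTML_IS_GOOD
        if errors <= 15:
            return 3.0   # TEXT_REVIEW_HTML_IS_OK
        if errors <= 30:
            return 2.0   # TEXT_REVIEW_HTML_IS_BAD
        return 1.0       # TEXT_REVIEW_HTML_IS_VERY_BAD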
From bc8981bbfa08673663b8e5c13a2d8ddc428ddcf6 Mon Sep 17 00:00:00 2001
From: Mattias 
Date: Sat, 12 Dec 2020 11:14:45 +0100
Subject: [PATCH 21/49] translated standard_files

---
 default.py                               |   1 -
 locales/en/LC_MESSAGES/standard_files.mo | Bin 0 -> 1089 bytes
 locales/en/LC_MESSAGES/standard_files.po |  43 +++++++++++++++++
 locales/en/LC_MESSAGES/webperf-core.mo   | Bin 3539 -> 3433 bytes
 locales/en/LC_MESSAGES/webperf-core.po   |   4 ---
 locales/sv/LC_MESSAGES/standard_files.mo | Bin 0 -> 1087 bytes
 locales/sv/LC_MESSAGES/standard_files.po |  43 +++++++++++++++++
 locales/sv/LC_MESSAGES/webperf-core.mo   | Bin 3663 -> 3561 bytes
 locales/sv/LC_MESSAGES/webperf-core.po   |   4 ---
 tests/standard_files.py                  |  25 ++++++-----
 10 files changed, 103 insertions(+), 17 deletions(-)
 create mode 100644 locales/en/LC_MESSAGES/standard_files.mo
 create mode 100644 locales/en/LC_MESSAGES/standard_files.po
 create mode 100644 locales/sv/LC_MESSAGES/standard_files.mo
 create mode 100644 locales/sv/LC_MESSAGES/standard_files.po

diff --git a/default.py b/default.py
index ce92f17a..81829e4f 100644
--- a/default.py
+++ b/default.py
@@ -116,7 +116,6 @@ def testing(langCode, sites, test_type= TEST_ALL, show_reviews= False):
         print(_('TEXT_TEST_WEBBKOLL'))
         tests.extend(testsites(langCode, sites, test_type=TEST_WEBBKOLL, show_reviews=show_reviews))
     if (test_type == TEST_ALL or test_type == TEST_STANDARD_FILES):
-        print(_('TEXT_TEST_STANDARD_FILES'))
         tests.extend(testsites(langCode, sites, test_type=TEST_STANDARD_FILES, show_reviews=show_reviews))
     if (test_type == TEST_ALL or test_type == TEST_YELLOW_LAB_TOOLS):
         tests.extend(testsites(langCode, sites, test_type=TEST_YELLOW_LAB_TOOLS, show_reviews=show_reviews))
diff --git a/locales/en/LC_MESSAGES/standard_files.mo b/locales/en/LC_MESSAGES/standard_files.mo
new file mode 100644
index 0000000000000000000000000000000000000000..0c94031d9d14b43dbce0d69b9a756e652ba338e8
GIT binary patch
[base85 binary patch data omitted]

diff --git a/locales/en/LC_MESSAGES/standard_files.po b/locales/en/LC_MESSAGES/standard_files.po
new file mode 100644
index 00000000..0591707a
--- /dev/null
+++ b/locales/en/LC_MESSAGES/standard_files.po
@@ -0,0 +1,43 @@
+# English (default).
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR , 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 11:15+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: mattias \n"
+"Language-Team: English \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nRunning test: 9 - Standard files\n###############################"
+
+msgid "TEXT_ROBOTS_MISSING"
+msgstr "* robots.txt is missing, not allowed to download or has no expected content.\n"
+
+msgid "TEXT_ROBOTS_OK"
+msgstr "* robots.txt seems ok.\n"
+
+msgid "TEXT_SITEMAP_MISSING"
+msgstr "* Sitemap is not specified in robots.txt\n"
+
+msgid "TEXT_SITEMAP_FOUND"
+msgstr "* Sitemap is mentioned in robots.txt\n"
+
+msgid "TEXT_SITEMAP_BROKEN"
+msgstr "* Sitemap seems to be broken.\n"
+
+msgid "TEXT_SITEMAP_OK"
+msgstr "* Sitemap seems to work.\n"
+
+msgid "TEXT_RSS_FEED_MISSING"
+msgstr "* RSS subscription is missing in meta.\n"
+
+msgid "TEXT_RSS_FEED_FOUND"
+msgstr "* RSS subscription found.\n"
diff --git a/locales/en/LC_MESSAGES/webperf-core.mo b/locales/en/LC_MESSAGES/webperf-core.mo
index 77c4667b3fd5d8a9961ab74052ebc20c93f236d8..43588d773f8afe66df1224baafc1e6943f5dfd95 100644
GIT binary patch
[base85 binary patch data omitted]

diff --git a/locales/en/LC_MESSAGES/webperf-core.po b/locales/en/LC_MESSAGES/webperf-core.po
index c802c779..e80f191e 100644
--- a/locales/en/LC_MESSAGES/webperf-core.po
+++ b/locales/en/LC_MESSAGES/webperf-core.po
@@ -73,10 +69,6 @@ msgstr "###############################\nRunning test: 5 - Google Lighthouse Bes
 msgid "TEXT_TEST_WEBBKOLL"
 msgstr "###############################\nRunning test: 20 - Webbkoll"
 
-#: default.py:120
-msgid "TEXT_TEST_STANDARD_FILES"
-msgstr "###############################\nRunning test: 9 - Standard files"
-
 #: default.py:127
 msgid "TEXT_TEST_VALID_ARGUMENTS"
 msgstr "Valid arguments for option -t/--test:"
diff --git a/locales/sv/LC_MESSAGES/standard_files.mo b/locales/sv/LC_MESSAGES/standard_files.mo
new file mode 100644
index 0000000000000000000000000000000000000000..b9d573d96fe3f42d7c9253e038843c0a6904d4d5
GIT binary patch
[base85 binary patch data omitted]

diff --git a/locales/sv/LC_MESSAGES/standard_files.po b/locales/sv/LC_MESSAGES/standard_files.po
new file mode 100644
--- /dev/null
+++ b/locales/sv/LC_MESSAGES/standard_files.po
@@ -0,0 +1,43 @@
+# Swedish
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR , 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 11:15+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: mattias \n"
+"Language-Team: Swedish \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nKör test: 9 - Standardfiler\n###############################"
+
+msgid "TEXT_ROBOTS_MISSING"
+msgstr "* robots.txt saknas, får inte lov att hämtas eller har inte förväntat innehåll.\n"
+
+msgid "TEXT_ROBOTS_OK"
+msgstr "* robots.txt verkar ok.\n"
+
+msgid "TEXT_SITEMAP_MISSING"
+msgstr "* Sitemap anges inte i robots.txt\n"
+
+msgid "TEXT_SITEMAP_FOUND"
+msgstr "* Sitemap finns omnämnd i robots.txt\n"
+
+msgid "TEXT_SITEMAP_BROKEN"
+msgstr "* Sitemap verkar vara trasig.\n"
+
+msgid "TEXT_SITEMAP_OK"
+msgstr "* Sitemap verkar fungera.\n"
+
+msgid "TEXT_RSS_FEED_MISSING"
+msgstr "* RSS-prenumeration saknas i meta.\n"
+
+msgid "TEXT_RSS_FEED_FOUND"
+msgstr "* RSS-prenumeration hittad.\n"
diff --git a/locales/sv/LC_MESSAGES/webperf-core.mo b/locales/sv/LC_MESSAGES/webperf-core.mo
index e5b1a078fe0ade7d93bf704713bc6e13bc2a68d4..0c5928be43467200360fc0444d8e9a49232a422a 100644
GIT binary patch
[base85 binary patch data omitted]

diff --git a/locales/sv/LC_MESSAGES/webperf-core.po b/locales/sv/LC_MESSAGES/webperf-core.po
index 3025cacf..f271da29 100644
--- a/locales/sv/LC_MESSAGES/webperf-core.po
+++ b/locales/sv/LC_MESSAGES/webperf-core.po
@@ -73,10 +69,6 @@ msgstr "###############################\nKör test: 5 - Google Lighthouse god pr
 msgid "TEXT_TEST_WEBBKOLL"
 msgstr "###############################\nKör test: 20 - Webbkoll"
 
-#: default.py:120
-msgid "TEXT_TEST_STANDARD_FILES"
-msgstr "###############################\nKör test: 9 - Standardfiler"
-
 #: default.py:127
 msgid "TEXT_TEST_VALID_ARGUMENTS"
 msgstr "Giltiga argument att välja på -t/--test:"
diff --git a/tests/standard_files.py b/tests/standard_files.py
index 6fedf3f0..8a75968f 100644
--- a/tests/standard_files.py
+++ b/tests/standard_files.py
@@ -10,6 +10,8 @@
 from bs4 import BeautifulSoup
 import config
 from tests.utils import *
+import gettext
+_ = gettext.gettext
 
 ### DEFAULTS
 request_timeout = config.http_request_timeout
@@ -22,6 +24,13 @@ def run_test(langCode, url):
     * at least one sitemap/siteindex mentioned in robots.txt
     * a RSS feed mentioned in the page's meta
     """
+
+    language = gettext.translation('standard_files', localedir='locales', languages=[langCode])
+    language.install()
+    _ = language.gettext
+
+    print(_('TEXT_RUNNING_TEST'))
+
     o = urllib.parse.urlparse(url)
     parsed_url = '{0}://{1}/'.format(o.scheme, o.netloc)
     robots_content = httpRequestGetContent(parsed_url + 'robots.txt')
@@ -33,18 +42,18 @@ def run_test(langCode, url):
     if robots_content == None or ('user-agent' not in robots_content.lower() and 'disallow' not in robots_content.lower() and 'allow' not in robots_content.lower()):
         points -= 3
-        review += '* robots.txt saknas, får inte lov att hämtas eller har inte förväntat innehåll.\n'
+        review += _("TEXT_ROBOTS_MISSING")
         return_dict['robots.txt'] = 'missing content'
     else:
-        review += '* robots.txt verkar ok.\n'
+        review += _("TEXT_ROBOTS_OK")
         return_dict['robots.txt'] = 'ok'
 
     if 'sitemap:' not in robots_content.lower():
         points -= 2
-        review += '* Sitemap anges inte i robots.txt\n'
+        review += _("TEXT_SITEMAP_MISSING")
         return_dict['sitemap'] = 'not in robots.txt'
     else:
-        review += '* Sitemap finns omnämnd i robots.txt\n'
+        review += _("TEXT_SITEMAP_FOUND")
         return_dict['sitemap'] = 'ok'
 
     smap_pos = robots_content.lower().find('sitemap')
@@ -62,10 +71,10 @@ def run_test(langCode, url):
 
         if not is_sitemap(smap_content):
             points -= 1
-            review += '* Sitemap verkar vara trasig.\n'
+            review += _("TEXT_SITEMAP_BROKEN")
             return_dict['sitemap_check'] = '\'{0}\' seem to be broken'.format(found_smaps[0])
         else:
-            review += '* Sitemap verkar fungera.\n'
+            review += _("TEXT_SITEMAP_OK")
             return_dict['sitemap_check'] = '\'{0}\' seem ok'.format(found_smaps[0])
 
     # TODO: validate first feed
@@ -78,11 +87,11 @@ def run_test(langCode, url):
 
     if len(feed) == 0:
         points -= 0.5
-        review += '* RSS-prenumeration saknas i meta.\n'
+        review += _("TEXT_RSS_FEED_MISSING")
         return_dict['feed'] = 'not in meta'
         return_dict['num_feeds'] = len(feed)
     elif len(feed) > 0:
-        review += '* RSS-prenumeration hittad.\n'
+        review += _("TEXT_RSS_FEED_FOUND")
         return_dict['feed'] = 'found in meta'
         return_dict['num_feeds'] = len(feed)
         tmp_feed = []

From 53e68d662705cbb0a38acf0e4abd3da6aecfe026 Mon Sep 17 00:00:00 2001
From: Mattias 
Date: Sat, 12 Dec 2020 20:56:18 +0100
Subject: [PATCH 22/49] translated privacy_webbkollen

---
 default.py                                   |   1 -
 locales/en/LC_MESSAGES/privacy_webbkollen.mo | Bin 0 -> 846 bytes
 locales/en/LC_MESSAGES/privacy_webbkollen.po |  34 +++++++++++++++
 locales/en/LC_MESSAGES/webperf-core.mo       | Bin 3433 -> 3338 bytes
 locales/en/LC_MESSAGES/webperf-core.po       |   4 ---
 locales/sv/LC_MESSAGES/privacy_webbkollen.mo | Bin 0 -> 870 bytes
 locales/sv/LC_MESSAGES/privacy_webbkollen.po |  34 +++++++++++++++
 locales/sv/LC_MESSAGES/webperf-core.mo       | Bin 3561 -> 3469 bytes
 locales/sv/LC_MESSAGES/webperf-core.po       |   4 ---
 tests/privacy_webbkollen.py                  |  18 +++++---
 10 files changed, 81 insertions(+), 14 deletions(-)
 create mode 100644 locales/en/LC_MESSAGES/privacy_webbkollen.mo
 create mode 100644 locales/en/LC_MESSAGES/privacy_webbkollen.po
 create mode 100644 locales/sv/LC_MESSAGES/privacy_webbkollen.mo
 create mode 100644 locales/sv/LC_MESSAGES/privacy_webbkollen.po

diff --git a/default.py b/default.py
index 81829e4f..640d28e5 100644
--- a/default.py
+++ b/default.py
@@ -113,7 +113,6 @@ def testing(langCode, sites, test_type= TEST_ALL, show_reviews= False):
     if (test_type == TEST_ALL or test_type == TEST_CSS):
         tests.extend(testsites(langCode, sites, test_type=TEST_CSS, show_reviews=show_reviews))
     if (test_type == TEST_ALL or test_type == TEST_WEBBKOLL):
-        print(_('TEXT_TEST_WEBBKOLL'))
         tests.extend(testsites(langCode, sites, test_type=TEST_WEBBKOLL, show_reviews=show_reviews))
     if (test_type == TEST_ALL or test_type == TEST_STANDARD_FILES):
         tests.extend(testsites(langCode, sites, test_type=TEST_STANDARD_FILES, show_reviews=show_reviews))
diff --git a/locales/en/LC_MESSAGES/privacy_webbkollen.mo b/locales/en/LC_MESSAGES/privacy_webbkollen.mo
new file mode 100644
index 0000000000000000000000000000000000000000..4c838ee14f157831264164dc3ad5e1bfcd66936c
GIT binary patch
[base85 binary patch data omitted]
diff --git a/locales/en/LC_MESSAGES/privacy_webbkollen.po b/locales/en/LC_MESSAGES/privacy_webbkollen.po
new file mode 100644
--- /dev/null
+++ b/locales/en/LC_MESSAGES/privacy_webbkollen.po
@@ -0,0 +1,34 @@
+# English (default).
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR , 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 20:45+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: mattias \n"
+"Language-Team: English \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nRunning test: 20 - Privacy (Webbkollen)\n###############################"
+
+msgid "TEXT_REVIEW_VERY_GOOD"
+msgstr "* The website is good on privacy!\n"
+
+msgid "TEXT_REVIEW_IS_GOOD"
+msgstr "* The site could be better, but is ok.\n"
+
+msgid "TEXT_REVIEW_IS_OK"
+msgstr "* Ok integrity but should get better.\n"
+
+msgid "TEXT_REVIEW_IS_BAD"
+msgstr "* Poor integrity.\n"
+
+msgid "TEXT_REVIEW_IS_VERY_BAD"
+msgstr "* Very bad privacy!\n"
diff --git a/locales/en/LC_MESSAGES/webperf-core.mo b/locales/en/LC_MESSAGES/webperf-core.mo
index 43588d773f8afe66df1224baafc1e6943f5dfd95..e2c8c568434fbdc933b8594125068fd784a14cd0 100644
GIT binary patch
[base85 binary patch data omitted]

diff --git a/locales/en/LC_MESSAGES/webperf-core.po b/locales/en/LC_MESSAGES/webperf-core.po
index e80f191e..b0ef5c38 100644
--- a/locales/en/LC_MESSAGES/webperf-core.po
+++ b/locales/en/LC_MESSAGES/webperf-core.po
@@ -69,10 +69,6 @@ msgstr "###############################\nRunning test: 8 - Google Lighthouse PWA
 msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE"
 msgstr "###############################\nRunning test: 5 - Google Lighthouse Best Practice"
 
-#: default.py:117
-msgid "TEXT_TEST_WEBBKOLL"
-msgstr "###############################\nRunning test: 20 - Webbkoll"
-
 #: default.py:127
 msgid "TEXT_TEST_VALID_ARGUMENTS"
 msgstr "Valid arguments for option -t/--test:"
diff --git a/locales/sv/LC_MESSAGES/privacy_webbkollen.mo b/locales/sv/LC_MESSAGES/privacy_webbkollen.mo
new file mode 100644
index 0000000000000000000000000000000000000000..6e308b909f06554125268bf1a0bf8204b958d063
GIT binary patch
[base85 binary patch data omitted]
zhcYq{F895^+_z{EXaQ+{vQV3BYnKnZng7&uXPGH}H!$dEc_fBYcvPHfoWwDz$M)ql G)~tW_NAWHI literal 0 HcmV?d00001 diff --git a/locales/sv/LC_MESSAGES/privacy_webbkollen.po b/locales/sv/LC_MESSAGES/privacy_webbkollen.po new file mode 100644 index 00000000..b8c70873 --- /dev/null +++ b/locales/sv/LC_MESSAGES/privacy_webbkollen.po @@ -0,0 +1,34 @@ +# Swedish +# Copyright (C) 2020 WebPerf +# FIRST AUTHOR , 2020. +# +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"POT-Creation-Date: 2020-12-12 20:45+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: mattias \n" +"Language-Team: Swedish \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" + + +msgid "TEXT_RUNNING_TEST" +msgstr "###############################\nKör test: 20 - Privacy (Webbkollen)\n###############################" + +msgid "TEXT_REVIEW_VERY_GOOD" +msgstr "* Webbplatsen är bra på integritet!\n" + +msgid "TEXT_REVIEW_IS_GOOD" +msgstr "* Webbplatsen kan bli bättre, men är helt ok.\n" + +msgid "TEXT_REVIEW_IS_OK" +msgstr "* Ok integritet men borde bli bättre.\n" + +msgid "TEXT_REVIEW_IS_BAD" +msgstr "* Dålig integritet.\n" + +msgid "TEXT_REVIEW_IS_VERY_BAD" +msgstr "* Väldigt dålig integritet!\n" diff --git a/locales/sv/LC_MESSAGES/webperf-core.mo b/locales/sv/LC_MESSAGES/webperf-core.mo index 0c5928be43467200360fc0444d8e9a49232a422a..6af7c751cfc86b8ec8ff60f50c66f2b9579e6bfd 100644 GIT binary patch delta 495 zcmYk(JxIeq7{>7@w$=JkX-g?e{H%j*YN4Jy(lPfeI zxWit2!v$>PGS2o$i+F@9_<*bEQEV0*poewjm7XxSLGVscz<8fDfiqadHJrkIq(pTz k$8?Pud_;2;Z8SUdgC#5{?1Y<*rc;SD{8%P_cAVJy1IYL@K>z>% delta 566 zcmYMxze@sP7{Kw@Jj>K8lZwj9vmi<{ttDN8ESLOMdScLUAw)z%t+hx?OEg(cO+nBS z4Gz)N*wm0ylh7FH9}p7seYv3H-ut}I%lq&=?|bLoPj98+Q7#3~7-N*7SY-77G>JWg zE$orY9YVOF_fHLF-aPhU28VGQy?BfoUf~Qrq7OezE5c$GVvq$t%ETzjW^x$BLyX`x z#_$!x*rk{*;yCdxPUGpH4>&>mjsa|=Jjj&ItQW%}Q4wOD$r7jA!!NqJ>ogNCx+y}! 
zGxCZTKJr9wC>w5}6BUOLYv{%e+`=T@V;;Xz$26N|<2M+OQ#V$!>GX1D)hOg^O-tCS r>WRE=6ch1yGMh@(25ckt>WR}=bvaz>LPQH`#hp@V|Das1&Z)*PeyKj5 diff --git a/locales/sv/LC_MESSAGES/webperf-core.po b/locales/sv/LC_MESSAGES/webperf-core.po index f271da29..27220383 100644 --- a/locales/sv/LC_MESSAGES/webperf-core.po +++ b/locales/sv/LC_MESSAGES/webperf-core.po @@ -69,10 +69,6 @@ msgstr "###############################\nKör test: 8 - Google Lighthouse progre msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE" msgstr "###############################\nKör test: 5 - Google Lighthouse god praxis" -#: default.py:117 -msgid "TEXT_TEST_WEBBKOLL" -msgstr "###############################\nKör test: 20 - Webbkoll" - #: default.py:127 msgid "TEXT_TEST_VALID_ARGUMENTS" msgstr "Giltiga argument att välja på -t/--test:" diff --git a/tests/privacy_webbkollen.py b/tests/privacy_webbkollen.py index cec5591e..75e608ef 100644 --- a/tests/privacy_webbkollen.py +++ b/tests/privacy_webbkollen.py @@ -10,6 +10,8 @@ from bs4 import BeautifulSoup import config from tests.utils import * +import gettext +_ = gettext.gettext ### DEFAULTS request_timeout = config.http_request_timeout @@ -20,6 +22,12 @@ def run_test(langCode, url): errors = 0 review = '' + language = gettext.translation('privacy_webbkollen', localedir='locales', languages=[langCode]) + language.install() + _ = language.gettext + + print(_('TEXT_RUNNING_TEST')) + url = 'https://webbkoll.dataskydd.net/sv/check?url={0}'.format(url.replace('/', '%2F').replace(':', '%3A')) headers = {'user-agent': 'Mozilla/5.0 (compatible; Webperf; +https://webperf.se)'} request = requests.get(url, allow_redirects=False, headers=headers, timeout=request_timeout*2) @@ -70,15 +78,15 @@ def run_test(langCode, url): mess += '* {0}'.format(re.sub(' +', ' ', line.text.strip()).replace('\n', ' ').replace(' ', '\n* ').replace('Kolla upp', '').replace(' ', ' ')) if points == 5: - review = '* Webbplatsen är bra på integritet!\n' + review = ('TEXT_REVIEW_VERY_GOOD') elif points >= 4: - review = '* Webbplatsen kan bli bättre, men är helt ok.\n' + review = _('TEXT_REVIEW_IS_GOOD') elif points >= 3: - review = '* Ok integritet men borde bli bättre.\n' + review = _('TEXT_REVIEW_IS_OK') elif points >= 2: - review = '* Dålig integritet.\n' + review = _('TEXT_REVIEW_IS_BAD') else: - review = '* Väldigt dålig integritet!\n' + review = _('TEXT_REVIEW_IS_VERY_BAD') points = 1.0 review += mess From ab3bccc3ce3cb237ef9c7d2c82e21447503a5bdc Mon Sep 17 00:00:00 2001 From: Mattias Date: Sat, 12 Dec 2020 21:05:41 +0100 Subject: [PATCH 23/49] made sure to support en, sv, de and no in privacy test --- tests/privacy_webbkollen.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tests/privacy_webbkollen.py b/tests/privacy_webbkollen.py index 75e608ef..7742dbc1 100644 --- a/tests/privacy_webbkollen.py +++ b/tests/privacy_webbkollen.py @@ -28,7 +28,15 @@ def run_test(langCode, url): print(_('TEXT_RUNNING_TEST')) - url = 'https://webbkoll.dataskydd.net/sv/check?url={0}'.format(url.replace('/', '%2F').replace(':', '%3A')) + webb_kollen_lang_code = 'en' + if langCode == 'sv': + webb_kollen_lang_code = 'sv' + elif langCode == 'de': + webb_kollen_lang_code = 'de' + elif langCode == 'no': + webb_kollen_lang_code = 'no' + + url = 'https://webbkoll.dataskydd.net/{1}/check?url={0}'.format(url.replace('/', '%2F').replace(':', '%3A'), webb_kollen_lang_code) headers = {'user-agent': 'Mozilla/5.0 (compatible; Webperf; +https://webperf.se)'} request = requests.get(url, allow_redirects=False, headers=headers, 
From 3fbe7f1d5bed1a34dda6e0f12f1c4ac181d0a302 Mon Sep 17 00:00:00 2001
From: Mattias 
Date: Sat, 12 Dec 2020 21:39:58 +0100
Subject: [PATCH 24/49] also translating summary text

---
 locales/en/LC_MESSAGES/webperf-core.mo | Bin 3338 -> 3338 bytes
 locales/en/LC_MESSAGES/webperf-core.po |   2 +-
 tests/privacy_webbkollen.py            |  10 +++++-----
 3 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/locales/en/LC_MESSAGES/webperf-core.mo b/locales/en/LC_MESSAGES/webperf-core.mo
index e2c8c568434fbdc933b8594125068fd784a14cd0..71b0b488f1d949c1dc8fdf51e35773f687814747 100644
GIT binary patch
[base85 binary patch data omitted]

diff --git a/locales/en/LC_MESSAGES/webperf-core.po b/locales/en/LC_MESSAGES/webperf-core.po
--- a/locales/en/LC_MESSAGES/webperf-core.po
+++ b/locales/en/LC_MESSAGES/webperf-core.po
@@ -51,7 +51,7 @@ msgstr "### {0} ###"
 
 #: default.py:93
 msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE"
-msgstr "###############################\nRunning test: 0 - Google Lighthouse Performance"
+msgstr "###############################\nRunning test: 1 - Google Lighthouse Performance"
 
 #: default.py:96
 msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_A11Y"
diff --git a/tests/privacy_webbkollen.py b/tests/privacy_webbkollen.py
index 7742dbc1..de34d5d6 100644
--- a/tests/privacy_webbkollen.py
+++ b/tests/privacy_webbkollen.py
@@ -28,15 +28,15 @@ def run_test(langCode, url):
 
     print(_('TEXT_RUNNING_TEST'))
 
-    webb_kollen_lang_code = 'en'
+    api_lang_code = 'en'
     if langCode == 'sv':
-        webb_kollen_lang_code = 'sv'
+        api_lang_code = 'sv'
     elif langCode == 'de':
-        webb_kollen_lang_code = 'de'
+        api_lang_code = 'de'
     elif langCode == 'no':
-        webb_kollen_lang_code = 'no'
+        api_lang_code = 'no'
 
-    url = 'https://webbkoll.dataskydd.net/{1}/check?url={0}'.format(url.replace('/', '%2F').replace(':', '%3A'), webb_kollen_lang_code)
+    url = 'https://webbkoll.dataskydd.net/{1}/check?url={0}'.format(url.replace('/', '%2F').replace(':', '%3A'), api_lang_code)
     headers = {'user-agent': 'Mozilla/5.0 (compatible; Webperf; +https://webperf.se)'}
     request = requests.get(url, allow_redirects=False, headers=headers, timeout=request_timeout*2)
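PATCH 23 and 24 pin the Webbkoll request to one of the interface languages the service supports (en, sv, de, no) and fall back to English for anything else. The if/elif chain could equally be expressed as a set lookup; a sketch of that alternative (same behavior, not what the patch commits):

    # Alternative formulation of the api_lang_code mapping from the patches above.
    WEBBKOLL_LANGUAGES = {'en', 'sv', 'de', 'no'}

    def api_lang_code(langCode: str) -> str:
        return langCode if langCode in WEBBKOLL_LANGUAGES else 'en'

    assert api_lang_code('sv') == 'sv'
    assert api_lang_code('fi') == 'en'  # unsupported codes fall back to English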
From 4d9e0988d16ce118e04edf48e7b44485ed883f3a Mon Sep 17 00:00:00 2001
From: Mattias 
Date: Sat, 12 Dec 2020 22:19:50 +0100
Subject: [PATCH 25/49] translated performance_lighthouse

---
 default.py                                    |   1 -
 .../en/LC_MESSAGES/performance_lighthouse.mo  | Bin 0 -> 1385 bytes
 .../en/LC_MESSAGES/performance_lighthouse.po  |  56 ++++++++++++++++++
 .../sv/LC_MESSAGES/performance_lighthouse.mo  | Bin 0 -> 1437 bytes
 .../sv/LC_MESSAGES/performance_lighthouse.po  |  56 ++++++++++++++++++
 tests/performance_lighthouse.py               |  33 +++++++----
 6 files changed, 133 insertions(+), 13 deletions(-)
 create mode 100644 locales/en/LC_MESSAGES/performance_lighthouse.mo
 create mode 100644 locales/en/LC_MESSAGES/performance_lighthouse.po
 create mode 100644 locales/sv/LC_MESSAGES/performance_lighthouse.mo
 create mode 100644 locales/sv/LC_MESSAGES/performance_lighthouse.po

diff --git a/default.py b/default.py
index 640d28e5..4d733d54 100644
--- a/default.py
+++ b/default.py
@@ -92,7 +92,6 @@ def testing(langCode, sites, test_type= TEST_ALL, show_reviews= False):
     tests = list()
     ##############
     if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE):
-        print(_('TEXT_TEST_GOOGLE_LIGHTHOUSE'))
         tests.extend(testsites(langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE, show_reviews=show_reviews))
     if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_A11Y):
         print(_('TEXT_TEST_GOOGLE_LIGHTHOUSE_A11Y'))
diff --git a/locales/en/LC_MESSAGES/performance_lighthouse.mo b/locales/en/LC_MESSAGES/performance_lighthouse.mo
new file mode 100644
index 0000000000000000000000000000000000000000..242385e88f1c4730c562293d7a1f90e4abf3b611
GIT binary patch
[base85 binary patch data omitted]

diff --git a/locales/en/LC_MESSAGES/performance_lighthouse.po b/locales/en/LC_MESSAGES/performance_lighthouse.po
new file mode 100644
--- /dev/null
+++ b/locales/en/LC_MESSAGES/performance_lighthouse.po
@@ -0,0 +1,56 @@
+# English (default).
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR , 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 22:00+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: mattias \n"
+"Language-Team: English \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nRunning test: 1 - Performance (Google Lighthouse)\n###############################"
+
+msgid "TEXT_REVIEW_VERY_GOOD"
+msgstr "* The website loads very fast!\n"
+
+msgid "TEXT_REVIEW_IS_GOOD"
+msgstr "* The website is fast.\n"
+
+msgid "TEXT_REVIEW_IS_OK"
+msgstr "* Average speed.\n"
+
+msgid "TEXT_REVIEW_IS_BAD"
+msgstr "* The website is quite slow.\n"
+
+msgid "TEXT_REVIEW_IS_VERY_BAD"
+msgstr "* The website is very slow!\n"
+
+msgid "TEXT_REVIEW_OBSERVED_SPEED"
+msgstr "* Observed speed: {} seconds\n"
+
+msgid "TEXT_REVIEW_FIRST_MEANINGFUL_PAINT"
+msgstr "* First meaningful visual change: {} sec\n"
+
+msgid "TEXT_REVIEW_FIRST_MEANINGFUL_PAINT_3G"
+msgstr "* First meaningful visual change on 3G: {} sec\n"
+
+msgid "TEXT_REVIEW_CPU_IDLE"
+msgstr "* CPU idle after: {} sec\n"
+
+msgid "TEXT_REVIEW_INTERACTIVE"
+msgstr "* The website is interactive: {} sec\n"
+
+msgid "TEXT_REVIEW_REDIRECTS"
+msgstr "* Number of redirects: {}\n"
+
+msgid "TEXT_REVIEW_TOTAL_WEIGHT"
+msgstr "* Total weight of the page: {} kb\n"
+
diff --git a/locales/sv/LC_MESSAGES/performance_lighthouse.mo b/locales/sv/LC_MESSAGES/performance_lighthouse.mo
new file mode 100644
index 0000000000000000000000000000000000000000..1bee9fd673a395e5a201bb8adbe7f294d42677bf
GIT binary patch
[base85 binary patch data omitted]

diff --git a/locales/sv/LC_MESSAGES/performance_lighthouse.po b/locales/sv/LC_MESSAGES/performance_lighthouse.po
new file mode 100644
--- /dev/null
+++ b/locales/sv/LC_MESSAGES/performance_lighthouse.po
@@ -0,0 +1,56 @@
+# Swedish
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR , 2020. 
+# +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"POT-Creation-Date: 2020-12-12 22:00+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: mattias \n" +"Language-Team: Swedish \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" + + +msgid "TEXT_RUNNING_TEST" +msgstr "###############################\nKör test: 1 - Prestanda (Google Lighthouse)\n###############################" + +msgid "TEXT_REVIEW_VERY_GOOD" +msgstr "* Webbplatsen laddar in mycket snabbt!\n" + +msgid "TEXT_REVIEW_IS_GOOD" +msgstr "* Webbplatsen är snabb.\n" + +msgid "TEXT_REVIEW_IS_OK" +msgstr "* Genomsnittlig hastighet.\n" + +msgid "TEXT_REVIEW_IS_BAD" +msgstr "* Webbplatsen är ganska långsam.\n" + +msgid "TEXT_REVIEW_IS_VERY_BAD" +msgstr "* Webbplatsen är väldigt långsam!\n" + +msgid "TEXT_REVIEW_OBSERVED_SPEED" +msgstr "* Observerad hastighet: {} sekunder\n" + +msgid "TEXT_REVIEW_FIRST_MEANINGFUL_PAINT" +msgstr "* Första meningsfulla visuella ändring: {} sek\n" + +msgid "TEXT_REVIEW_FIRST_MEANINGFUL_PAINT_3G" +msgstr "* Första meningsfulla visuella ändring på 3G: {} sek\n" + +msgid "TEXT_REVIEW_CPU_IDLE" +msgstr "* CPU vilar efter: {} sek\n" + +msgid "TEXT_REVIEW_INTERACTIVE" +msgstr "* Webbplatsen är interaktiv: {} sek\n" + +msgid "TEXT_REVIEW_REDIRECTS" +msgstr "* Antal hänvisningar: {} st\n" + +msgid "TEXT_REVIEW_TOTAL_WEIGHT" +msgstr "* Sidans totala vikt: {} kb\n" + diff --git a/tests/performance_lighthouse.py b/tests/performance_lighthouse.py index e6ce5a6c..8b0a21e0 100644 --- a/tests/performance_lighthouse.py +++ b/tests/performance_lighthouse.py @@ -10,6 +10,8 @@ from bs4 import BeautifulSoup import config from tests.utils import * +import gettext +_ = gettext.gettext ### DEFAULTS request_timeout = config.http_request_timeout @@ -23,6 +25,13 @@ def run_test(langCode, url, strategy='mobile', category='performance'): pwa = https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category=pwa&strategy=mobile&url=YOUR-SITE&key=YOUR-KEY seo = https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category=seo&strategy=mobile&url=YOUR-SITE&key=YOUR-KEY """ + + language = gettext.translation('performance_lighthouse', localedir='locales', languages=[langCode]) + language.install() + _ = language.gettext + + print(_('TEXT_RUNNING_TEST')) + check_url = url.strip() pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&strategy={2}&key={3}'.format(category, check_url, strategy, googlePageSpeedApiKey) @@ -62,26 +71,26 @@ def run_test(langCode, url, strategy='mobile', category='performance'): if speedindex <= 500: points = 5 - review = '* Webbplatsen laddar in mycket snabbt!\n' + review = _("TEXT_REVIEW_VERY_GOOD") elif speedindex <= 1200: points = 4 - review = '* Webbplatsen är snabb.\n' + review = _("TEXT_REVIEW_IS_GOOD") elif speedindex <= 2500: points = 3 - review = '* Genomsnittlig hastighet.\n' + review = _("TEXT_REVIEW_IS_OK") elif speedindex <= 3999: points = 2 - review = '* Webbplatsen är ganska långsam.\n' + review = _("TEXT_REVIEW_IS_BAD") elif speedindex > 3999: points = 1 - review = '* Webbplatsen är väldigt långsam!\n' + review = _("TEXT_REVIEW_IS_VERY_BAD") - review += '* Observerad hastighet: {} sekunder\n'.format(convert_to_seconds(return_dict["observedSpeedIndex"], False)) - review += '* Första meningsfulla visuella ändring: {} sek\n'.format(convert_to_seconds(return_dict["firstMeaningfulPaint"], False)) - review += '* 
Första meningsfulla visuella ändring på 3G: {} sek\n'.format(convert_to_seconds(return_dict["first-contentful-paint-3g"], False)) - review += '* CPU vilar efter: {} sek\n'.format(convert_to_seconds(return_dict["firstCPUIdle"], False)) - review += '* Webbplatsen är interaktiv: {} sek\n'.format(convert_to_seconds(return_dict["interactive"], False)) - review += '* Antal hänvisningar: {} st\n'.format(return_dict["redirects"]) - review += '* Sidans totala vikt: {} kb\n'.format(int(return_dict["total-byte-weight"]/1000)) + review += _("TEXT_REVIEW_OBSERVED_SPEED").format(convert_to_seconds(return_dict["observedSpeedIndex"], False))#'* Observerad hastighet: {} sekunder\n'.format(convert_to_seconds(return_dict["observedSpeedIndex"], False)) + review += _("TEXT_REVIEW_FIRST_MEANINGFUL_PAINT").format(convert_to_seconds(return_dict["firstMeaningfulPaint"], False))#'* Första meningsfulla visuella ändring: {} sek\n'.format(convert_to_seconds(return_dict["firstMeaningfulPaint"], False)) + review += _("TEXT_REVIEW_FIRST_MEANINGFUL_PAINT_3G").format(convert_to_seconds(return_dict["first-contentful-paint-3g"], False))#'* Första meningsfulla visuella ändring på 3G: {} sek\n'.format(convert_to_seconds(return_dict["first-contentful-paint-3g"], False)) + review += _("TEXT_REVIEW_CPU_IDLE").format(convert_to_seconds(return_dict["firstCPUIdle"], False))#'* CPU vilar efter: {} sek\n'.format(convert_to_seconds(return_dict["firstCPUIdle"], False)) + review += _("TEXT_REVIEW_INTERACTIVE").format(convert_to_seconds(return_dict["interactive"], False))#'* Webbplatsen är interaktiv: {} sek\n'.format(convert_to_seconds(return_dict["interactive"], False)) + review += _("TEXT_REVIEW_REDIRECTS").format(return_dict["redirects"])#'* Antal hänvisningar: {} st\n'.format(return_dict["redirects"]) + review += _("TEXT_REVIEW_TOTAL_WEIGHT").format(int(return_dict["total-byte-weight"]/1000))#'* Sidans totala vikt: {} kb\n'.format(int(return_dict["total-byte-weight"]/1000)) return (points, review, return_dict) From e4d35735d0b6d5405950dbe13c7faaa41a419625 Mon Sep 17 00:00:00 2001 From: Mattias Date: Sun, 13 Dec 2020 16:53:22 +0100 Subject: [PATCH 26/49] adjusted to show time spent for redirects in seconds --- .../en/LC_MESSAGES/performance_lighthouse.mo | Bin 1385 -> 1394 bytes .../en/LC_MESSAGES/performance_lighthouse.po | 2 +- .../sv/LC_MESSAGES/performance_lighthouse.mo | Bin 1437 -> 1450 bytes .../sv/LC_MESSAGES/performance_lighthouse.po | 2 +- tests/performance_lighthouse.py | 2 +- 5 files changed, 3 insertions(+), 3 deletions(-) diff --git a/locales/en/LC_MESSAGES/performance_lighthouse.mo b/locales/en/LC_MESSAGES/performance_lighthouse.mo index 242385e88f1c4730c562293d7a1f90e4abf3b611..fbb456ffc443ef908b1a49f5673ba80a9ab84bb2 100644 GIT binary patch delta 78 zcmaFK^@(f39XVwN28Nj|3=B#@dM}Wc2htCKbRm#tVBPp@A+thAW^SrNaY1Tci9%X_ bkwQ^wN@h`Na!Ik3LUpY|acc79O)T315FQrS delta 69 zcmeyw^^$AC9XTlm28Nj|3=B#@dJB-22h!JpbRm%b&a&~>LS}Kl(%hufB8B`kg`(7y T%%arfl42``>e|V>S+)ZJZ*CNI diff --git a/locales/en/LC_MESSAGES/performance_lighthouse.po b/locales/en/LC_MESSAGES/performance_lighthouse.po index 6e6dbe21..c71241cd 100644 --- a/locales/en/LC_MESSAGES/performance_lighthouse.po +++ b/locales/en/LC_MESSAGES/performance_lighthouse.po @@ -49,7 +49,7 @@ msgid "TEXT_REVIEW_INTERACTIVE" msgstr "* The website is interactive: {} sec\n" msgid "TEXT_REVIEW_REDIRECTS" -msgstr "* Number of redirects: {}\n" +msgstr "* Time spent for redirects: {} sec\n" msgid "TEXT_REVIEW_TOTAL_WEIGHT" msgstr "* Total weight of the page: {} kb\n" diff --git
a/locales/sv/LC_MESSAGES/performance_lighthouse.mo b/locales/sv/LC_MESSAGES/performance_lighthouse.mo index 1bee9fd673a395e5a201bb8adbe7f294d42677bf..064e25c126a875c30a3c2e7696b13d9390aae22b 100644 GIT binary patch delta 84 zcmbQsy^4Fn9a${~28OdN3=A?r`Uj9!2hyUf3=BCy+IHj5Ys~5)nJEgz1*v%{sYQt? j3I&IkDr6j9l2?{loR^uGo>*k1P+hA~oSHrPCCe57s}UNM delta 56 zcmZ3*J(qjJ9a&ih28OdN3=A?r`ZbVN2htp@3=BCy+F;|)Ys{>Uc_oQClOM2%FqTaI H# Date: Sat, 19 Dec 2020 21:13:21 +0100 Subject: [PATCH 27/49] removed obsolete google api key reference --- tests/standard_files.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/standard_files.py b/tests/standard_files.py index 8a75968f..4dafe670 100644 --- a/tests/standard_files.py +++ b/tests/standard_files.py @@ -15,7 +15,6 @@ ### DEFAULTS request_timeout = config.http_request_timeout -googlePageSpeedApiKey = config.googlePageSpeedApiKey def run_test(langCode, url): """ From 157b1c9503fb420be872bf3fe4d723e2969a7c6d Mon Sep 17 00:00:00 2001 From: Mattias Date: Sun, 20 Dec 2020 11:40:02 +0100 Subject: [PATCH 28/49] Enable csv file as input ( #24 ) --- default.py | 6 +++++ engines/csv.py | 72 ++++++++++++++++++++++++++++++++++++++++++++++++-- models.py | 16 +++++++++++ 3 files changed, 92 insertions(+), 2 deletions(-) diff --git a/default.py b/default.py index 4d733d54..a0a0d838 100644 --- a/default.py +++ b/default.py @@ -232,12 +232,18 @@ def main(argv): sys.exit(2) elif opt in ("-i", "--input"): # input file path input_filename = arg + + file_ending = "" file_long_ending = "" + if (len(input_filename) > 4): + file_ending = input_filename[-4:].lower() if (len(input_filename) > 7): file_long_ending = input_filename[-7:].lower() if file_long_ending == ".sqlite": from engines.sqlite import read_sites, add_site, delete_site + if (file_ending == ".csv"): + from engines.csv import read_sites, add_site, delete_site else: from engines.json import read_sites, add_site, delete_site sites = read_sites(input_filename) diff --git a/engines/csv.py b/engines/csv.py index a535cbce..be874ca1 100644 --- a/engines/csv.py +++ b/engines/csv.py @@ -1,10 +1,78 @@ -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- from models import Sites, SiteTests import csv + def write_tests(output_filename, siteTests): with open(output_filename, 'w', newline='') as csvfile: writer = csv.DictWriter(csvfile, fieldnames=SiteTests.fieldnames()) writer.writeheader() - writer.writerows(siteTests) \ No newline at end of file + writer.writerows(siteTests) + + +def add_site(input_filename, url): + sites = read_sites(input_filename) + # print(sites) + id = len(sites) + sites.append([id, url]) + write_sites(input_filename, sites) + + print(_('TEXT_WEBSITE_URL_ADDED').format(url)) + + return sites + + +def delete_site(input_filename, url): + sites = read_sites(input_filename) + tmpSites = list() + for site in sites: + site_id = site[0] + site_url = site[1] + if (url != site_url): + tmpSites.append([site_id, site_url]) + + write_sites(input_filename, tmpSites) + + print(_('TEXT_WEBSITE_URL_DELETED').format(site_url)) + + return tmpSites + + +def read_sites(input_filename): + sites = list() + + with open(input_filename, newline='') as csvfile: + dialect = csv.Sniffer().sniff(csvfile.read(1024)) + csvfile.seek(0) + reader = csv.reader(csvfile, dialect) + + with open(input_filename, newline='') as csvfile: + csv_reader = csv.reader(csvfile, delimiter=',', quotechar='|') + current_siteid = 0 + for row in csv_reader: + number_of_fields = len(Sites.fieldnames()) + current_number_of_fields = len(row) + if number_of_fields == 
current_number_of_fields: + # ignore first row as that is our header info + if current_siteid != 0: + sites.append([row[0], row[1]]) + elif current_number_of_fields == 1: + # we have no header and only one column, use column as website url + sites.append([current_siteid, "".join(row)]) + current_siteid += 1 + return sites + +def write_sites(output_filename, sites): + sites_output = list() + for site in sites: + site_id = site[0] + site_url = site[1] + site_object = Sites(id=site_id, website=site_url).todata() + sites_output.append(site_object) + + with open(output_filename, 'w', newline='') as csvfile: + writer = csv.DictWriter(csvfile, fieldnames=Sites.fieldnames()) + + writer.writeheader() + writer.writerows(sites_output) diff --git a/models.py b/models.py index f6682f58..e16aceab 100644 --- a/models.py +++ b/models.py @@ -10,6 +10,22 @@ class Sites(object): website = "" active = 1 + def __init__(self, id, website): + self.id = id + self.website = website + + def todata(self): + result = { + 'id': self.id, + 'website': self.website + } + return result + + @staticmethod + def fieldnames(): + result = [ 'id', 'website'] + return result + def __repr__(self): return '<Site %r>' % self.title From 4f85c7184842ca3346ad4ca6674084daca2e3d72 Mon Sep 17 00:00:00 2001 From: Mattias Date: Sun, 20 Dec 2020 19:39:52 +0100 Subject: [PATCH 29/49] Added security.txt test + bugfixes - Added security.txt test ( #23 ) - changed default timeout to 60 seconds - changed default useragent - cleaned up some unused variables - fixed so all requests use the timeout from config - fixed so all requests (except to webbkollen) use the useragent from config - fixed 'NoneType' object has no attribute 'lower' - fixed crash on certificate error (we now give result on not being able to download it) - fixed error when robots.txt is an HTML page and was falsely given points because strings match (now checks for '<html' in the content) - fixed crash on connection exception, we now handle it as not being able to download the resource
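For reference, a minimal security.txt that would satisfy the new test could look like the following (illustrative example values, not taken from this commit): Contact: mailto:security@example.com Expires: 2021-12-31T23:00:00.000Z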
zcmaLR%}N4M7zW_aR4B6|D^iO%1^pNzb_a?;vc+Pvi@BBRUzYjKET(%3s7{Q#zJU7FS;j|EqArHY} zINQ!aIDs62qj0mGccB~k2rj^@wvTWM`3;))+53oGL=Ey_F^&yCG(Qo79GX%vj(lk) znuM>=3qRl@blZrw;3_mPzJW9F9{S-E48Z|A(Hx9Hv#!EQYtMB2I&X)3$gu{x;BrU&fu!EJLujhA4d7ag?idt+vD62-m JadvAx{sc58kGTK< delta 205 zcmey)eUPL6o)F7a1|Z-9Vi_RL0dbJP9w1v7i1z|99}r)J@*ej)=1_p5k25W{tK!y}hK!llrK^;iz1L*`H q9S)@BfbC18Gel{TE0}0%>)Y$y|&Xo4J{0FikFCZ3h58q7{$; diff --git a/locales/en/LC_MESSAGES/standard_files.po b/locales/en/LC_MESSAGES/standard_files.po index 0591707a..1c6039c5 100644 --- a/locales/en/LC_MESSAGES/standard_files.po +++ b/locales/en/LC_MESSAGES/standard_files.po @@ -41,3 +41,18 @@ msgstr "* RSS subscription is missing in meta.\n" msgid "TEXT_RSS_FEED_FOUND" msgstr "* RSS subscription found.\n" + +msgid "TEXT_SECURITY_MISSING" +msgstr "* security.txt is missing.\n" + +msgid "TEXT_SECURITY_WRONG_CONTENT" +msgstr "* security.txt has wrong content.\n" + +msgid "TEXT_SECURITY_OK_CONTENT" +msgstr "* security.txt seems to work.\n" + +msgid "TEXT_SECURITY_REQUIRED_CONTACT_MISSING" +msgstr "* security.txt required contact is missing.\n" + +msgid "TEXT_SECURITY_REQUIRED_EXPIRES_MISSING" +msgstr "* security.txt required expires (added in draft v10) is missing.\n" diff --git a/locales/sv/LC_MESSAGES/standard_files.mo b/locales/sv/LC_MESSAGES/standard_files.mo index b9d573d96fe3f42d7c9253e038843c0a6904d4d5..45c60bdde157f0c33e12cff080558331b93b3e73 100644 GIT binary patch delta 622 zcmZ|JO-lkn7zgmjw}`SND=LeavZ5kPuiZo!vq)1{vIt};HCa*E9mP(9-#{!pbhT@j zphNTm`60Rmb?eYKDCobrMI{alzxmHRGtb=jzV}z2?B-L#uwhPNo@n9Muo{Slkq6)) zoYAuzjw1Wv5Zu#q4vr&N;Q~CVkEK?RcHc>5p1kL?Gs)opeO?4$PH8b z!8fRVu*Xc~gig2*18@vBp#$E)dH4u7;0IiV>lUJESccli8gQ1ZM0Z$d7oRP)>!XU` zi${avZgMLaiv>dg5`~1AjD7EhHNQeAzt%>L|`Qi?1otRQzD z$%QgZU1G0xR*P1R4f>5Rqz delta 213 zcmeyvy`Q80o)F7a1|Z-9Vi_RL0dbJP9w1v7i1z|99}r)J@*ej)=1_lWR25SZuW{AbIKw23nVFjdPfOH&? 
wmIKnefwUTsz6GR}fHWfu1A`QhR$-aU#h5Xfo2ir8z{+IvbS8eL$yKaP0FIFrj{pDw diff --git a/locales/sv/LC_MESSAGES/standard_files.po b/locales/sv/LC_MESSAGES/standard_files.po index 7a4d3fd0..7fe8e623 100644 --- a/locales/sv/LC_MESSAGES/standard_files.po +++ b/locales/sv/LC_MESSAGES/standard_files.po @@ -41,3 +41,18 @@ msgstr "* RSS-prenumeration saknas i meta.\n" msgid "TEXT_RSS_FEED_FOUND" msgstr "* RSS-prenumeration hittad.\n" + +msgid "TEXT_SECURITY_MISSING" +msgstr "* security.txt saknas.\n" + +msgid "TEXT_SECURITY_WRONG_CONTENT" +msgstr "* security.txt har inte förväntat innehåll.\n" + +msgid "TEXT_SECURITY_OK_CONTENT" +msgstr "* security.txt verkar ok.\n" + +msgid "TEXT_SECURITY_REQUIRED_CONTACT_MISSING" +msgstr "* security.txt kontakt fält saknas.\n" + +msgid "TEXT_SECURITY_REQUIRED_EXPIRES_MISSING" +msgstr "* security.txt fält för utgångstid saknas (krav tillagt i utkast v10).\n" diff --git a/tests/a11y_lighthouse.py b/tests/a11y_lighthouse.py index 9ed9ae27..2105d2f3 100644 --- a/tests/a11y_lighthouse.py +++ b/tests/a11y_lighthouse.py @@ -12,7 +12,6 @@ from tests.utils import * ### DEFAULTS -request_timeout = config.http_request_timeout googlePageSpeedApiKey = config.googlePageSpeedApiKey def run_test(langCode, url, strategy='mobile', category='accessibility'): diff --git a/tests/best_practice_lighthouse.py b/tests/best_practice_lighthouse.py index 406606be..0ba39cf8 100644 --- a/tests/best_practice_lighthouse.py +++ b/tests/best_practice_lighthouse.py @@ -12,7 +12,6 @@ from tests.utils import * ### DEFAULTS -request_timeout = config.http_request_timeout googlePageSpeedApiKey = config.googlePageSpeedApiKey def run_test(langCode, url, strategy='mobile', category='best-practices'): diff --git a/tests/css_validator_w3c.py b/tests/css_validator_w3c.py index 631d9a31..a2faf4a8 100644 --- a/tests/css_validator_w3c.py +++ b/tests/css_validator_w3c.py @@ -15,6 +15,7 @@ ### DEFAULTS request_timeout = config.http_request_timeout +useragent = config.useragent def run_test(langCode, url): """ @@ -33,7 +34,7 @@ def run_test(langCode, url): ## kollar koden try: url = 'https://jigsaw.w3.org/css-validator/validator?uri={0}&profile=css3svg&usermedium=all&warning=1&vextwarning=&lang=en'.format(url.replace('/', '%2F').replace(':', '%3A')) - headers = {'user-agent': 'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)'} + headers = {'user-agent': useragent} request = requests.get(url, allow_redirects=False, headers=headers, timeout=request_timeout*2) ## hämta HTML diff --git a/tests/frontend_quality_yellow_lab_tools.py b/tests/frontend_quality_yellow_lab_tools.py index db27fca1..824efb9d 100644 --- a/tests/frontend_quality_yellow_lab_tools.py +++ b/tests/frontend_quality_yellow_lab_tools.py @@ -14,7 +14,6 @@ _ = gettext.gettext ### DEFAULTS -request_timeout = config.http_request_timeout googlePageSpeedApiKey = config.googlePageSpeedApiKey def run_test(langCode, url, device='phone'): diff --git a/tests/html_validator_w3c.py b/tests/html_validator_w3c.py index 357950e8..c7c92877 100644 --- a/tests/html_validator_w3c.py +++ b/tests/html_validator_w3c.py @@ -15,6 +15,7 @@ ### DEFAULTS request_timeout = config.http_request_timeout +useragent = config.useragent def run_test(langCode, url): """ @@ -33,7 +34,7 @@ def run_test(langCode, url): ## kollar koden try: url = 'https://validator.w3.org/nu/?doc={0}'.format(url.replace('/', '%2F').replace(':', '%3A')) - headers = {'user-agent': 'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)'} + headers = {'user-agent': useragent} 
request = requests.get(url, allow_redirects=False, headers=headers, timeout=request_timeout) ## hämta HTML diff --git a/tests/page_not_found.py b/tests/page_not_found.py index b88c4d04..0c44811c 100644 --- a/tests/page_not_found.py +++ b/tests/page_not_found.py @@ -15,6 +15,7 @@ ### DEFAULTS request_timeout = config.http_request_timeout +useragent = config.useragent def run_test(langCode, url): """ @@ -34,7 +35,7 @@ def run_test(langCode, url): ## kollar koden o = urllib.parse.urlparse(url) url = '{0}://{1}/{3}/{2}'.format(o.scheme, o.netloc, 'finns-det-en-sida/pa-den-har-adressen/testanrop/', get_guid(5)) - headers = {'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:81.0) Gecko/20100101 Firefox/81.0'} + headers = {'user-agent': useragent} request = requests.get(url, allow_redirects=True, headers=headers, timeout=request_timeout) code = request.status_code if code == 404: diff --git a/tests/performance_lighthouse.py b/tests/performance_lighthouse.py index 9f2fa30f..1833b917 100644 --- a/tests/performance_lighthouse.py +++ b/tests/performance_lighthouse.py @@ -14,7 +14,6 @@ _ = gettext.gettext ### DEFAULTS -request_timeout = config.http_request_timeout googlePageSpeedApiKey = config.googlePageSpeedApiKey def run_test(langCode, url, strategy='mobile', category='performance'): diff --git a/tests/privacy_webbkollen.py b/tests/privacy_webbkollen.py index de34d5d6..b8aac928 100644 --- a/tests/privacy_webbkollen.py +++ b/tests/privacy_webbkollen.py @@ -15,6 +15,7 @@ ### DEFAULTS request_timeout = config.http_request_timeout +useragent = config.useragent def run_test(langCode, url): import time diff --git a/tests/pwa_lighthouse.py b/tests/pwa_lighthouse.py index ac22e974..5aaaa508 100644 --- a/tests/pwa_lighthouse.py +++ b/tests/pwa_lighthouse.py @@ -12,7 +12,6 @@ from tests.utils import * ### DEFAULTS -request_timeout = config.http_request_timeout googlePageSpeedApiKey = config.googlePageSpeedApiKey def run_test(langCode, url, strategy='mobile', category='pwa'): diff --git a/tests/seo_lighthouse.py b/tests/seo_lighthouse.py index 187a3e51..70a47b10 100644 --- a/tests/seo_lighthouse.py +++ b/tests/seo_lighthouse.py @@ -12,7 +12,6 @@ from tests.utils import * ### DEFAULTS -request_timeout = config.http_request_timeout googlePageSpeedApiKey = config.googlePageSpeedApiKey def run_test(langCode, url, strategy='mobile', category='seo'): diff --git a/tests/standard_files.py b/tests/standard_files.py index 4dafe670..c7c853ba 100644 --- a/tests/standard_files.py +++ b/tests/standard_files.py @@ -1,10 +1,10 @@ -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import socket import ssl import json import requests -import urllib # https://docs.python.org/3/library/urllib.parse.html +import urllib # https://docs.python.org/3/library/urllib.parse.html import uuid import re from bs4 import BeautifulSoup @@ -13,93 +13,226 @@ import gettext _ = gettext.gettext -### DEFAULTS +# DEFAULTS request_timeout = config.http_request_timeout +useragent = config.useragent + def run_test(langCode, url): - """ - Looking for: - * robots.txt - * at least one sitemap/siteindex mentioned in robots.txt - * a RSS feed mentioned in the page's meta - """ - - language = gettext.translation('standard_files', localedir='locales', languages=[langCode]) - language.install() - _ = language.gettext - - print(_('TEXT_RUNNING_TEST')) - - o = urllib.parse.urlparse(url) - parsed_url = '{0}://{1}/'.format(o.scheme, o.netloc) - robots_content = httpRequestGetContent(parsed_url + 'robots.txt') - - review = '' - return_dict = 
dict() - return_dict["num_sitemaps"] = 0 - points = 5.0 - - if robots_content == None or ('user-agent' not in robots_content.lower() and 'disallow' not in robots_content.lower() and 'allow' not in robots_content.lower()): - points -= 3 - review += _("TEXT_ROBOTS_MISSING") - return_dict['robots.txt'] = 'missing content' - else: - review += _("TEXT_ROBOTS_OK") - return_dict['robots.txt'] = 'ok' - - if 'sitemap:' not in robots_content.lower(): - points -= 2 - review += _("TEXT_SITEMAP_MISSING") - return_dict['sitemap'] = 'not in robots.txt' - else: - review += _("TEXT_SITEMAP_FOUND") - return_dict['sitemap'] = 'ok' - - smap_pos = robots_content.lower().find('sitemap') - smaps = robots_content[smap_pos:].split('\n') - found_smaps = [] - for line in smaps: - if 'sitemap:' in line.lower(): - found_smaps.append(line.lower().replace('sitemap:', '').strip()) - - return_dict["num_sitemaps"] = len(found_smaps) - - if len(found_smaps) > 0: - return_dict["sitemaps"] = found_smaps - smap_content = httpRequestGetContent(found_smaps[0]) - - if not is_sitemap(smap_content): - points -= 1 - review += _("TEXT_SITEMAP_BROKEN") - return_dict['sitemap_check'] = '\'{0}\' seem to be broken'.format(found_smaps[0]) - else: - review += _("TEXT_SITEMAP_OK") - return_dict['sitemap_check'] = '\'{0}\' seem ok'.format(found_smaps[0]) - - # TODO: validate first feed - headers = {'user-agent': config.useragent} - request = requests.get(url, allow_redirects=True, headers=headers, timeout=request_timeout) - - soup = BeautifulSoup(request.text, 'lxml') - #feed = soup.find_all(rel='alternate') - feed = soup.find_all("link", {"type" : "application/rss+xml"}) - - if len(feed) == 0: - points -= 0.5 - review += _("TEXT_RSS_FEED_MISSING") - return_dict['feed'] = 'not in meta' - return_dict['num_feeds'] = len(feed) - elif len(feed) > 0: - review += _("TEXT_RSS_FEED_FOUND") - return_dict['feed'] = 'found in meta' - return_dict['num_feeds'] = len(feed) - tmp_feed = [] - for single_feed in feed: - tmp_feed.append(single_feed.get('href')) - - return_dict['feeds'] = tmp_feed - - if points < 1: - points = 1 - - return (points, review, return_dict) + """ + Looking for: + * robots.txt + * at least one sitemap/siteindex mentioned in robots.txt + * a RSS feed mentioned in the page's meta + """ + + language = gettext.translation( + 'standard_files', localedir='locales', languages=[langCode]) + language.install() + _ = language.gettext + + print(_('TEXT_RUNNING_TEST')) + + o = urllib.parse.urlparse(url) + parsed_url = '{0}://{1}/'.format(o.scheme, o.netloc) + + review = '' + return_dict = dict() + points = 5.0 + + # robots.txt (up to -3) + robots_result = validate_robots(_, parsed_url) + points -= robots_result[0] + review += robots_result[1] + return_dict.update(robots_result[2]) + robots_content = robots_result[3] + + # sitemap.xml (up to -3) + has_robots_txt = return_dict['robots.txt'] == 'ok' + sitemap_result = validate_sitemap(_, robots_content, has_robots_txt) + points -= sitemap_result[0] + review += sitemap_result[1] + return_dict.update(sitemap_result[2]) + + # rss feed (up to -0.5) + feed_result = validate_feed(_, url) + points -= feed_result[0] + review += feed_result[1] + return_dict.update(feed_result[2]) + + # security.txt (up to -1) + security_txt_result = validate_security_txt(_, parsed_url) + points -= security_txt_result[0] + review += security_txt_result[1] + return_dict.update(security_txt_result[2]) + + # minimum score is 1, make sure we have at least 1 + if points < 1: + points = 1 + + return (points, review, return_dict) 
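+# For reference, a robots.txt that passes the checks below could look like this (illustrative example, not part of this commit): +# User-agent: * +# Disallow: /admin/ +# Sitemap: https://example.com/sitemap.xml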
+ + +def validate_robots(_, parsed_url): + review = '' + return_dict = dict() + points = 0.0 + + robots_content = httpRequestGetContent(parsed_url + 'robots.txt') + + if robots_content == None or '<html' in robots_content.lower() or ('user-agent' not in robots_content.lower() and 'disallow' not in robots_content.lower() and 'allow' not in robots_content.lower()): + points = 3 + review += _("TEXT_ROBOTS_MISSING") + return_dict['robots.txt'] = 'missing content' + robots_content = '' + else: + review += _("TEXT_ROBOTS_OK") + return_dict['robots.txt'] = 'ok' + + return (points, review, return_dict, robots_content) + + +def validate_sitemap(_, robots_content, has_robots_txt): + review = '' + return_dict = dict() + return_dict["num_sitemaps"] = 0 + points = 0.0 + + if robots_content == None or not has_robots_txt or 'sitemap:' not in robots_content.lower(): + points += 2 + review += _("TEXT_SITEMAP_MISSING") + return_dict['sitemap'] = 'not in robots.txt' + else: + review += _("TEXT_SITEMAP_FOUND") + return_dict['sitemap'] = 'ok' + + smap_pos = robots_content.lower().find('sitemap') + smaps = robots_content[smap_pos:].split('\n') + found_smaps = [] + for line in smaps: + if 'sitemap:' in line.lower(): + found_smaps.append( + line.lower().replace('sitemap:', '').strip()) + + return_dict["num_sitemaps"] = len(found_smaps) + + if len(found_smaps) > 0: + return_dict["sitemaps"] = found_smaps + smap_content = httpRequestGetContent(found_smaps[0]) + + if not is_sitemap(smap_content): + points += 1 + review += _("TEXT_SITEMAP_BROKEN") + return_dict['sitemap_check'] = '\'{0}\' seem to be broken'.format( + found_smaps[0]) + else: + review += _("TEXT_SITEMAP_OK") + return_dict['sitemap_check'] = '\'{0}\' seem ok'.format( + found_smaps[0]) + + return (points, review, return_dict) + + +def validate_feed(_, url): + # TODO: validate first feed + + review = '' + return_dict = dict() + points = 0.0 + feed = list() + + headers = {'user-agent': config.useragent} + try: + request = requests.get(url, allow_redirects=True, + headers=headers, timeout=request_timeout) + soup = BeautifulSoup(request.text, 'lxml') + # feed = soup.find_all(rel='alternate') + feed = soup.find_all("link", {"type": "application/rss+xml"}) + + except: + #print('Exception looking for feed, probably connection problems') + pass + + if len(feed) == 0: + points = 0.5 + review += _("TEXT_RSS_FEED_MISSING") + return_dict['feed'] = 'not in meta' + return_dict['num_feeds'] = len(feed) + elif len(feed) > 0: + review += _("TEXT_RSS_FEED_FOUND") + return_dict['feed'] = 'found in meta' + return_dict['num_feeds'] = len(feed) + tmp_feed = [] + for single_feed in feed: + tmp_feed.append(single_feed.get('href')) + + return_dict['feeds'] = tmp_feed + + return (points, review, return_dict)
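+# A feed reference of the kind validate_feed above looks for sits in the page's head element, for example (illustrative, not part of this commit): +# <link rel="alternate" type="application/rss+xml" href="https://example.com/feed.xml">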
+ + +def validate_security_txt(_, parsed_url): + review = '' + return_dict = dict() + points = 0.0 + + security_wellknown_request = False + security_root_request = False + + headers = { + 'user-agent': useragent} + # normal location for security.txt + security_wellknown_url = parsed_url + '.well-known/security.txt' + try: + security_wellknown_request = requests.get(security_wellknown_url, allow_redirects=True, + headers=headers, timeout=request_timeout) + except: + #print('Exception looking for security.txt, probably connection problems') + pass + + security_wellknown_content = httpRequestGetContent( + security_wellknown_url) + + # security.txt can also be placed in root if for example technical reasons prohibit use of /.well-known/ + security_root_url = parsed_url + 'security.txt' + try: + security_root_request = requests.get(security_root_url, allow_redirects=True, + headers=headers, timeout=request_timeout) + except: + #print('Exception looking for security.txt, probably connection problems') + pass + security_root_content = httpRequestGetContent(security_root_url) + + #print('security_wellknown_content:' + security_wellknown_content) + #print('security_root_content:' + security_root_content) + + if not security_wellknown_request and not security_root_request: + # Can't find security.txt (not giving us 200 as status code) + points = 1.0 + review += _("TEXT_SECURITY_MISSING") + return_dict['security.txt'] = 'missing' + elif (security_wellknown_content == None or ('<html' in security_wellknown_content.lower()) or (security_root_content == None or ('<html' in security_root_content.lower()))): + # Html (404 page?) content instead of expected content + points = 1.0 + review += _("TEXT_SECURITY_WRONG_CONTENT") + return_dict['security.txt'] = 'wrong content' + elif (('Contact:' in security_wellknown_content and 'Expires:' in security_wellknown_content.lower()) or (('Contact:' in security_root_content and 'Expires:' in security_root_content.lower()))): + # Everything seems ok + review += _("TEXT_SECURITY_OK_CONTENT") + return_dict['security.txt'] = 'ok' + elif (not ('Contact:' in security_wellknown_content) and (not ('Contact:' in security_root_content))): + # Missing required Contact + points = 0.5 + review += _("TEXT_SECURITY_REQUIRED_CONTACT_MISSING") + return_dict['security.txt'] = 'required contact missing' + elif (not ('Expires:' in security_wellknown_content) or (not ('Expires:' in security_root_content))): + # Missing required Expires (added in version 10 of draft) + points = 0.25 + review += _("TEXT_SECURITY_REQUIRED_EXPIRES_MISSING") + return_dict['security.txt'] = 'required expires missing' + # print('* security.txt required content is missing') + + # print(security_wellknown_content) + # print('* security.txt seems ok') + + return (points, review, return_dict) diff --git a/tests/utils.py b/tests/utils.py index 6e7978e5..b5980bcd 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,30 +1,35 @@ -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import socket import ssl import json import requests -import urllib # https://docs.python.org/3/library/urllib.parse.html +import urllib # https://docs.python.org/3/library/urllib.parse.html import uuid import re from bs4 import BeautifulSoup import config -### DEFAULTS +# DEFAULTS request_timeout = config.http_request_timeout +useragent = config.useragent googlePageSpeedApiKey = config.googlePageSpeedApiKey + def httpRequestGetContent(url): """Trying to fetch the response content Attributes: url, as for the URL to fetch """ try: - a = requests.get(url) + headers = {'user-agent': useragent} + a = requests.get(url, allow_redirects=False, + headers=headers, timeout=request_timeout*2) + #a = requests.get(url, timeout=request_timeout) return a.text except requests.exceptions.SSLError: - if 'http://' in url: # trying the same URL over SSL/TLS + if 'http://' in url: # trying the same URL over SSL/TLS print('Info: Trying SSL before giving up.') return httpRequestGetContent(url.replace('http://', 'https://')) except requests.exceptions.ConnectionError: @@ -33,34 +38,37 @@ def httpRequestGetContent(url): pass except: print( - 'Error! Unfortunately the request for URL "{0}" either timed out or failed for other reason(s).
The timeout is set to {1} seconds.\nMessage:\n{2}'.format(url, timeout_in_seconds, sys.exc_info()[0])) + 'Error! Unfortunately the request for URL "{0}" either timed out or failed for other reason(s). The timeout is set to {1} seconds.\nMessage:\n{2}'.format(url, request_timeout, sys.exc_info()[0])) pass + def get_guid(length): """ Generates a unique string in specified length """ return str(uuid.uuid4())[0:length] + def convert_to_seconds(millis, return_with_seconds=True): """ Converts milliseconds to seconds. Arg: 'return_with_seconds' defaults to True and returns string ' sekunder' after the seconds """ if return_with_seconds: - return (millis/1000)%60 + " sekunder" + return (millis/1000) % 60 + " sekunder" else: - return (millis/1000)%60 + return (millis/1000) % 60 + def is_sitemap(content): - """Check a string to see if its content is a sitemap or siteindex. + """Check a string to see if its content is a sitemap or siteindex. - Attributes: content (string) - """ - try: - if 'www.sitemaps.org/schemas/sitemap/' in content or ' Date: Sun, 20 Dec 2020 20:26:10 +0100 Subject: [PATCH 30/49] Added support for sitemap as source for url:s ( #17 ) example use: python3 default.py -t 9 -r -i https://webperf.se/sitemap-posts.xml --- default.py | 110 +++++++++++++++++++++++++--------------- engines/sitemap.py | 39 ++++++++++++++ tests/standard_files.py | 1 + tests/utils.py | 3 ++ 4 files changed, 112 insertions(+), 41 deletions(-) create mode 100644 engines/sitemap.py diff --git a/default.py b/default.py index a0a0d838..e6df6195 100644 --- a/default.py +++ b/default.py @@ -1,5 +1,6 @@ -#-*- coding: utf-8 -*- -import sys, getopt +# -*- coding: utf-8 -*- +import sys +import getopt import datetime from models import Sites, SiteTests import config @@ -8,7 +9,9 @@ TEST_ALL = -1 -(TEST_UNKNOWN_01, TEST_GOOGLE_LIGHTHOUSE, TEST_PAGE_NOT_FOUND, TEST_UNKNOWN_03, TEST_GOOGLE_LIGHTHOUSE_SEO, TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE, TEST_HTML, TEST_CSS, TEST_GOOGLE_LIGHTHOUSE_PWA, TEST_STANDARD_FILES, TEST_GOOGLE_LIGHTHOUSE_A11Y, TEST_UNKNOWN_11, TEST_UNKNOWN_12, TEST_UNKNOWN_13, TEST_UNKNOWN_14, TEST_UNKNOWN_15, TEST_UNKNOWN_16, TEST_YELLOW_LAB_TOOLS, TEST_UNKNOWN_18, TEST_UNKNOWN_19, TEST_WEBBKOLL) = range(21) +(TEST_UNKNOWN_01, TEST_GOOGLE_LIGHTHOUSE, TEST_PAGE_NOT_FOUND, TEST_UNKNOWN_03, TEST_GOOGLE_LIGHTHOUSE_SEO, TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE, TEST_HTML, TEST_CSS, TEST_GOOGLE_LIGHTHOUSE_PWA, TEST_STANDARD_FILES, + TEST_GOOGLE_LIGHTHOUSE_A11Y, TEST_UNKNOWN_11, TEST_UNKNOWN_12, TEST_UNKNOWN_13, TEST_UNKNOWN_14, TEST_UNKNOWN_15, TEST_UNKNOWN_16, TEST_YELLOW_LAB_TOOLS, TEST_UNKNOWN_18, TEST_UNKNOWN_19, TEST_WEBBKOLL) = range(21) + def testsites(langCode, sites, test_type=None, show_reviews=False, only_test_untested_last_hours=24, order_by='title ASC'): """ @@ -71,55 +74,72 @@ def testsites(langCode, sites, test_type=None, show_reviews=False, only_test_unt json_data = '' pass - checkreport = str(the_test_result[1]).encode('utf-8') # för att lösa encoding-probs - jsondata = str(json_data).encode('utf-8') # --//-- + checkreport = str(the_test_result[1]).encode( + 'utf-8') # för att lösa encoding-probs + jsondata = str(json_data).encode('utf-8') # --//-- - site_test = SiteTests(site_id=site_id, type_of_test=test_type, check_report=checkreport, rating=the_test_result[0], test_date=datetime.datetime.now(), json_check_data=jsondata).todata() + site_test = SiteTests(site_id=site_id, type_of_test=test_type, check_report=checkreport, + rating=the_test_result[0], test_date=datetime.datetime.now(), 
json_check_data=jsondata).todata() result.append(site_test) - the_test_result = None # 190506 för att inte skriva testresultat till sajter när testet kraschat. Måste det sättas till ''? + # 190506 för att inte skriva testresultat till sajter när testet kraschat. Måste det sättas till ''? + the_test_result = None except Exception as e: print(_('TEXT_EXCEPTION'), website, '\n', e) pass i += 1 - + return result -def testing(langCode, sites, test_type= TEST_ALL, show_reviews= False): - print(_('TEXT_TESTING_START_HEADER').format(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) + +def testing(langCode, sites, test_type=TEST_ALL, show_reviews=False): + print(_('TEXT_TESTING_START_HEADER').format( + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))) tests = list() ############## if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE): - tests.extend(testsites(langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE, show_reviews=show_reviews)) + tests.extend(testsites( + langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_A11Y): print(_('TEXT_TEST_GOOGLE_LIGHTHOUSE_A11Y')) - tests.extend(testsites(langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE_A11Y, show_reviews=show_reviews)) + tests.extend(testsites( + langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE_A11Y, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_SEO): print(_('TEXT_TEST_GOOGLE_LIGHTHOUSE_SEO')) - tests.extend(testsites(langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE_SEO, show_reviews=show_reviews)) + tests.extend(testsites( + langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE_SEO, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_PWA): print(_('TEXT_TEST_GOOGLE_LIGHTHOUSE_PWA')) - tests.extend(testsites(langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE_PWA, show_reviews=show_reviews)) + tests.extend(testsites( + langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE_PWA, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE): print(_('TEXT_TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE')) - tests.extend(testsites(langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE, show_reviews=show_reviews)) + tests.extend(testsites( + langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_PAGE_NOT_FOUND): - tests.extend(testsites(langCode, sites, test_type=TEST_PAGE_NOT_FOUND, show_reviews=show_reviews)) + tests.extend(testsites(langCode, sites, + test_type=TEST_PAGE_NOT_FOUND, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_HTML): - tests.extend(testsites(langCode, sites, test_type=TEST_HTML, show_reviews=show_reviews)) + tests.extend(testsites(langCode, sites, + test_type=TEST_HTML, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_CSS): - tests.extend(testsites(langCode, sites, test_type=TEST_CSS, show_reviews=show_reviews)) + tests.extend(testsites(langCode, sites, + test_type=TEST_CSS, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_WEBBKOLL): - tests.extend(testsites(langCode, sites, test_type=TEST_WEBBKOLL, show_reviews=show_reviews)) + tests.extend(testsites(langCode, sites, + test_type=TEST_WEBBKOLL, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_STANDARD_FILES): - tests.extend(testsites(langCode, sites, 
test_type=TEST_STANDARD_FILES, show_reviews=show_reviews)) + tests.extend(testsites(langCode, sites, + test_type=TEST_STANDARD_FILES, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_YELLOW_LAB_TOOLS): - tests.extend(testsites(langCode, sites, test_type=TEST_YELLOW_LAB_TOOLS, show_reviews=show_reviews)) - + tests.extend(testsites( + langCode, sites, test_type=TEST_YELLOW_LAB_TOOLS, show_reviews=show_reviews)) + return tests + def validate_test_type(test_type): if test_type != TEST_HTML and test_type != TEST_PAGE_NOT_FOUND and test_type != TEST_CSS and test_type != TEST_WEBBKOLL and test_type != TEST_GOOGLE_LIGHTHOUSE and test_type != TEST_GOOGLE_LIGHTHOUSE_PWA and test_type != TEST_GOOGLE_LIGHTHOUSE_A11Y and test_type != TEST_GOOGLE_LIGHTHOUSE_SEO and test_type != TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE and test_type != TEST_STANDARD_FILES and test_type != TEST_YELLOW_LAB_TOOLS: print(_('TEXT_TEST_VALID_ARGUMENTS')) @@ -138,6 +158,7 @@ def validate_test_type(test_type): else: return test_type + def main(argv): """ WebPerf Core @@ -170,13 +191,14 @@ def main(argv): global _ # add support for default (en) language - language = gettext.translation('webperf-core', localedir='locales', languages=[langCode]) + language = gettext.translation( + 'webperf-core', localedir='locales', languages=[langCode]) language.install() _ = language.gettext - try: - opts, args = getopt.getopt(argv,"hu:t:i:o:rA:D:L:",["help","url","test", "input", "output", "review", "report", "addUrl", "deleteUrl", "language"]) + opts, args = getopt.getopt(argv, "hu:t:i:o:rA:D:L:", [ + "help", "url", "test", "input", "output", "review", "report", "addUrl", "deleteUrl", "language"]) except getopt.GetoptError: print(main.__doc__) sys.exit(2) @@ -185,17 +207,17 @@ def main(argv): show_help = True for opt, arg in opts: - if opt in ('-h', '--help'): # help + if opt in ('-h', '--help'): # help show_help = True - elif opt in ("-u", "--url"): # site url + elif opt in ("-u", "--url"): # site url sites.append([0, arg]) - elif opt in ("-A", "--addUrl"): # site url + elif opt in ("-A", "--addUrl"): # site url add_url = arg - elif opt in ("-D", "--deleteUrl"): # site url + elif opt in ("-D", "--deleteUrl"): # site url delete_url = arg - elif opt in ("-L", "--language"): # language code + elif opt in ("-L", "--language"): # language code # loop all available languages and verify language exist - import os + import os availableLanguages = list() localeDirs = os.listdir('locales') foundLang = False @@ -204,7 +226,8 @@ def main(argv): if (localeName[0:1] == '.'): continue - languageSubDirectory = os.path.join('locales', localeName, "LC_MESSAGES") + languageSubDirectory = os.path.join( + 'locales', localeName, "LC_MESSAGES") if (os.path.exists(languageSubDirectory)): availableLanguages.append(localeName) @@ -213,15 +236,17 @@ def main(argv): langCode = arg foundLang = True - language = gettext.translation('webperf-core', localedir='locales', languages=[langCode]) + language = gettext.translation( + 'webperf-core', localedir='locales', languages=[langCode]) language.install() _ = language.gettext if (not foundLang): # Not translateable - print('Language not found, only the following languages are available:', availableLanguages) + print( + 'Language not found, only the following languages are available:', availableLanguages) sys.exit(2) - elif opt in ("-t", "--test"): # test type + elif opt in ("-t", "--test"): # test type try: tmp_test_type = int(arg) test_type = validate_test_type(tmp_test_type) @@ -230,7 +255,7 @@ def 
main(argv): except Exception: validate_test_type(arg) sys.exit(2) - elif opt in ("-i", "--input"): # input file path + elif opt in ("-i", "--input"): # input file path input_filename = arg file_ending = "" @@ -240,18 +265,20 @@ def main(argv): if (len(input_filename) > 7): file_long_ending = input_filename[-7:].lower() - if file_long_ending == ".sqlite": + if file_long_ending == ".sqlite": from engines.sqlite import read_sites, add_site, delete_site if (file_ending == ".csv"): from engines.csv import read_sites, add_site, delete_site + if (file_ending == ".xml"): # https://example.com/sitemap.xml + from engines.sitemap import read_sites, add_site, delete_site else: from engines.json import read_sites, add_site, delete_site sites = read_sites(input_filename) pass - elif opt in ("-o", "--output"): # output file path + elif opt in ("-o", "--output"): # output file path output_filename = arg pass - elif opt in ("-r", "--review", "--report"): # writes reviews directly in terminal + elif opt in ("-r", "--review", "--report"): # writes reviews directly in terminal show_reviews = True pass @@ -267,7 +294,8 @@ def main(argv): sites = delete_site(input_filename, delete_url) elif (len(sites)): # run test(s) for every website - siteTests = testing(langCode, sites, test_type=test_type, show_reviews=show_reviews) + siteTests = testing( + langCode, sites, test_type=test_type, show_reviews=show_reviews) if (len(output_filename) > 0): file_ending = "" file_long_ending = "" @@ -294,4 +322,4 @@ If file is executed on itself then call a definition, mostly for testing purposes """ if __name__ == '__main__': - main(sys.argv[1:]) \ No newline at end of file + main(sys.argv[1:]) diff --git a/engines/sitemap.py b/engines/sitemap.py new file mode 100644 index 00000000..0012d832 --- /dev/null +++ b/engines/sitemap.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +from models import Sites, SiteTests +import config +from tests.utils import * +import re + + +def read_sites(input_sitemap_url): + sites = list() + + sitemap_content = httpRequestGetContent(input_sitemap_url) + + regex = r"<loc>(?P<itemurl>[^<]+)</loc>" + matches = re.finditer(regex, sitemap_content, re.MULTILINE) + + current_siteid = 0 + for matchNum, match in enumerate(matches, start=1): + + item_url = match.group('itemurl') + + sites.append([current_siteid, item_url]) + current_siteid += 1 + return sites
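+# Each entry matched by the regex above looks roughly like this in a sitemap.xml (illustrative example): +# <url><loc>https://example.com/some-page/</loc></url>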
+ + +def add_site(input_filename, url): + print("WARNING: sitemap engine is a read only method for testing all pages in a sitemap.xml, NO changes will be made") + + sites = read_sites(input_filename) + + return sites + + +def delete_site(input_filename, url): + print("WARNING: sitemap engine is a read only method for testing all pages in a sitemap.xml, NO changes will be made") + + sites = read_sites(input_filename) + + return sites diff --git a/tests/standard_files.py b/tests/standard_files.py index c7c853ba..dff601f8 100644 --- a/tests/standard_files.py +++ b/tests/standard_files.py @@ -118,6 +118,7 @@ def validate_sitemap(_, robots_content, has_robots_txt): if len(found_smaps) > 0: return_dict["sitemaps"] = found_smaps + smap_content = httpRequestGetContent(found_smaps[0]) if not is_sitemap(smap_content): diff --git a/tests/utils.py b/tests/utils.py index b5980bcd..c6427d92 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -33,6 +33,9 @@ def httpRequestGetContent(url): print('Info: Trying SSL before giving up.') return httpRequestGetContent(url.replace('http://', 'https://')) except requests.exceptions.ConnectionError: + if 'http://' in url: # trying the same URL over SSL/TLS + print('Connection error! Info: Trying SSL before giving up.') + return httpRequestGetContent(url.replace('http://', 'https://')) print( 'Connection error! Unfortunately the request for URL "{0}" failed.\nMessage:\n{1}'.format(url, sys.exc_info()[0])) pass From f13104240267565645797914bf1071c1867fdb78 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marcus=20=C3=96sterberg?= Date: Sun, 20 Dec 2020 21:40:53 +0100 Subject: [PATCH 31/49] A11y translations --- locales/en/LC_MESSAGES/a11y_lighthouse.po | 33 +++++++++++++++++++ locales/sv/LC_MESSAGES/a11y_lighthouse.po | 33 +++++++++++++++++++ tests/a11y_lighthouse.py | 19 +++++------ 3 files changed, 75 insertions(+), 10 deletions(-) create mode 100644 locales/en/LC_MESSAGES/a11y_lighthouse.po create mode 100644 locales/sv/LC_MESSAGES/a11y_lighthouse.po diff --git a/locales/en/LC_MESSAGES/a11y_lighthouse.po b/locales/en/LC_MESSAGES/a11y_lighthouse.po new file mode 100644 index 00000000..0e3ee509 --- /dev/null +++ b/locales/en/LC_MESSAGES/a11y_lighthouse.po @@ -0,0 +1,33 @@ +# English +# Copyright (C) 2020 WebPerf +# FIRST AUTHOR , 2020. +# +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"POT-Creation-Date: 2020-12-12 10:45+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: Marcus \n" +"Language-Team: English \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" + +msgid "TEXT_REVIEW_A11Y_VERY_GOOD" +msgstr "* The website does not have any apparent issues with accessibility!\n" + +msgid "TEXT_REVIEW_A11Y_IS_GOOD" +msgstr "* The website can be more accessible, but is rather good!\n" + +msgid "TEXT_REVIEW_A11Y_IS_OK" +msgstr "* The accessibility is average, but needs to get better.\n" + +msgid "TEXT_REVIEW_A11Y_IS_BAD" +msgstr "* The website is quite bad at accessibility and sucks for disabled people!\n" + +msgid "TEXT_REVIEW_A11Y_IS_VERY_BAD" +msgstr "* The accessibility is apparently really bad!\n" + +msgid "TEXT_REVIEW_A11Y_NUMBER_OF_PROBLEMS" +msgstr "* Number of problems with accessibility: {}\n" \ No newline at end of file diff --git a/locales/sv/LC_MESSAGES/a11y_lighthouse.po b/locales/sv/LC_MESSAGES/a11y_lighthouse.po new file mode 100644 index 00000000..0c5d5ed8 --- /dev/null +++ b/locales/sv/LC_MESSAGES/a11y_lighthouse.po @@ -0,0 +1,33 @@ +# Swedish +# Copyright (C) 2020 WebPerf +# FIRST AUTHOR , 2020.
+# +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"POT-Creation-Date: 2020-12-12 10:45+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: Marcus \n" +"Language-Team: Swedish \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" + +msgid "TEXT_REVIEW_A11Y_VERY_GOOD" +msgstr "* Webbplatsen har inga uppenbara fel inom tillgänglighet!\n" + +msgid "TEXT_REVIEW_A11Y_VERY_GOOD" +msgstr "* Webbplatsen kan bli mer tillgänglig, men är helt ok.\n" + +msgid "TEXT_REVIEW_A11Y_VERY_GOOD" +msgstr "* Genomsnittlig tillgänglighet men behöver bli bättre.\n" + +msgid "TEXT_REVIEW_A11Y_VERY_GOOD" +msgstr "* Webbplatsen är dålig på tillgänglighet för funktions­varierade personer.\n" + +msgid "TEXT_REVIEW_A11Y_VERY_GOOD" +msgstr "* Väldigt dålig tillgänglighet!\n" + +msgid "TEXT_REVIEW_A11Y_NUMBER_OF_PROBLEMS" +msgstr "* Antal problem med tillgänglighet: {} st\n" \ No newline at end of file diff --git a/tests/a11y_lighthouse.py b/tests/a11y_lighthouse.py index 2105d2f3..92741c40 100644 --- a/tests/a11y_lighthouse.py +++ b/tests/a11y_lighthouse.py @@ -11,6 +11,9 @@ import config from tests.utils import * +import gettext +_ = gettext.gettext + ### DEFAULTS googlePageSpeedApiKey = config.googlePageSpeedApiKey @@ -63,29 +66,25 @@ def run_test(langCode, url, strategy='mobile', category='accessibility'): if fails == 0: points = 5 - review = '* Webbplatsen har inga uppenbara fel inom tillgänglighet!\n' + review = _('TEXT_REVIEW_A11Y_VERY_GOOD') elif fails <= 2: points = 4 - review = '* Webbplatsen kan bli mer tillgänglig, men är helt ok.\n' + review = _('TEXT_REVIEW_A11Y_IS_GOOD') elif fails <= 3: points = 3 - review = '* Genomsnittlig tillgänglighet men behöver bli bättre.\n' + review = _('TEXT_REVIEW_A11Y_IS_OK') elif fails <= 5: points = 2 - review = '* Webbplatsen är dålig på tillgänglighet för funktions­varierade personer.\n' + review = _('TEXT_REVIEW_A11Y_IS_BAD') elif fails > 5: points = 1 - review = '* Väldigt dålig tillgänglighet!\n' - - review += '* Antal problem med tillgänglighet: {} st\n'.format(fails) + review = _('TEXT_REVIEW_A11Y_IS_VERY_BAD') - if fails != 0: - review += '\nTillgänglighets­problem:\n' + review += _('TEXT_REVIEW_A11Y_NUMBER_OF_PROBLEMS').format(fails) for key, value in return_dict.items(): if value == 0: review += '* {}\n'.format(fail_dict[key]) - #print(key) return (points, review, return_dict) From daf209698c09e225616b8d5f55072cabc611efbc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marcus=20=C3=96sterberg?= Date: Sun, 20 Dec 2020 21:49:40 +0100 Subject: [PATCH 32/49] Update a11y_lighthouse.po --- locales/sv/LC_MESSAGES/a11y_lighthouse.po | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/locales/sv/LC_MESSAGES/a11y_lighthouse.po b/locales/sv/LC_MESSAGES/a11y_lighthouse.po index 0c5d5ed8..ba7a4ee8 100644 --- a/locales/sv/LC_MESSAGES/a11y_lighthouse.po +++ b/locales/sv/LC_MESSAGES/a11y_lighthouse.po @@ -17,16 +17,16 @@ msgstr "" msgid "TEXT_REVIEW_A11Y_VERY_GOOD" msgstr "* Webbplatsen har inga uppenbara fel inom tillgänglighet!\n" -msgid "TEXT_REVIEW_A11Y_VERY_GOOD" +msgid "TEXT_REVIEW_A11Y_IS_GOOD" msgstr "* Webbplatsen kan bli mer tillgänglig, men är helt ok.\n" -msgid "TEXT_REVIEW_A11Y_VERY_GOOD" +msgid "TEXT_REVIEW_A11Y_IS_OK" msgstr "* Genomsnittlig tillgänglighet men behöver bli bättre.\n" -msgid "TEXT_REVIEW_A11Y_VERY_GOOD" +msgid "TEXT_REVIEW_A11Y_IS_BAD" msgstr "* Webbplatsen är dålig på tillgänglighet för funktions­varierade personer.\n"
-msgid "TEXT_REVIEW_A11Y_VERY_GOOD" +msgid "TEXT_REVIEW_A11Y_IS_VERY_BAD" msgstr "* Väldigt dålig tillgänglighet!\n" msgid "TEXT_REVIEW_A11Y_NUMBER_OF_PROBLEMS" From 8a03a49d7de84bf75f62a86d34d63ea69f4eda6d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marcus=20=C3=96sterberg?= Date: Sun, 20 Dec 2020 21:57:16 +0100 Subject: [PATCH 33/49] Files regarding 'good practice' according to Google Lighthouse --- .../LC_MESSAGES/best_practice_lighthouse.po | 36 +++++++++++++++++++ .../LC_MESSAGES/best_practice_lighthouse.po | 36 +++++++++++++++++++ tests/best_practice_lighthouse.py | 14 ++++---- 3 files changed, 79 insertions(+), 7 deletions(-) create mode 100644 locales/en/LC_MESSAGES/best_practice_lighthouse.po create mode 100644 locales/sv/LC_MESSAGES/best_practice_lighthouse.po diff --git a/locales/en/LC_MESSAGES/best_practice_lighthouse.po b/locales/en/LC_MESSAGES/best_practice_lighthouse.po new file mode 100644 index 00000000..65e9b577 --- /dev/null +++ b/locales/en/LC_MESSAGES/best_practice_lighthouse.po @@ -0,0 +1,36 @@ +# English +# Copyright (C) 2020 WebPerf +# FIRST AUTHOR , 2020. +# +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"POT-Creation-Date: 2020-12-12 10:45+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: Marcus \n" +"Language-Team: English \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" + +msgid "TEXT_REVIEW_PRACTICE_VERY_GOOD" +msgstr "* The website is within good practice according to Google Lighthouse!\n" + +msgid "TEXT_REVIEW_PRACTICE_IS_GOOD" +msgstr "* The website can still improve some on best practice.\n" + +msgid "TEXT_REVIEW_PRACTICE_IS_OK" +msgstr "* About average on following good practice\n" + +msgid "TEXT_REVIEW_PRACTICE_IS_BAD" +msgstr "* The website is pretty bad on what is considered good practice.\n" + +msgid "TEXT_REVIEW_PRACTICE_IS_VERY_BAD" +msgstr "* The website is really bad on adhering to good practice for the web!\n" + +msgid "TEXT_REVIEW_PRACTICE_NUMBER_OF_PROBLEMS" +msgstr "* Number of problem(s) regarding good practice: {}\n" + +msgid "TEXT_REVIEW_PRACTICE_PROBLEMS" +msgstr "\nProblem(s):\n" \ No newline at end of file diff --git a/locales/sv/LC_MESSAGES/best_practice_lighthouse.po b/locales/sv/LC_MESSAGES/best_practice_lighthouse.po new file mode 100644 index 00000000..f3e7b353 --- /dev/null +++ b/locales/sv/LC_MESSAGES/best_practice_lighthouse.po @@ -0,0 +1,36 @@ +# Swedish +# Copyright (C) 2020 WebPerf +# FIRST AUTHOR , 2020. 
+# +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"POT-Creation-Date: 2020-12-12 10:45+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: Marcus \n" +"Language-Team: Swedish \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" + +msgid "TEXT_REVIEW_PRACTICE_VERY_GOOD" +msgstr "* Webbplatsen följer god praxis fullt ut!\n" + +msgid "TEXT_REVIEW_PRACTICE_IS_GOOD" +msgstr "* Webbplatsen har ändå förbättrings­potential.\n" + +msgid "TEXT_REVIEW_PRACTICE_IS_OK" +msgstr "* Genomsnittlig efterlevnad till praxis.\n" + +msgid "TEXT_REVIEW_PRACTICE_IS_BAD" +msgstr "* Webbplatsen är ganska dålig på att följa god praxis.\n" + +msgid "TEXT_REVIEW_PRACTICE_IS_VERY_BAD" +msgstr "* Webbplatsen är inte alls bra på att följa praxis!\n" + +msgid "TEXT_REVIEW_PRACTICE_NUMBER_OF_PROBLEMS" +msgstr "* Antal problem med god praxis: {} st\n" + +msgid "TEXT_REVIEW_PRACTICE_PROBLEMS" +msgstr "\nProblem:\n" \ No newline at end of file diff --git a/tests/best_practice_lighthouse.py b/tests/best_practice_lighthouse.py index 0ba39cf8..daa1952c 100644 --- a/tests/best_practice_lighthouse.py +++ b/tests/best_practice_lighthouse.py @@ -63,25 +63,25 @@ def run_test(langCode, url, strategy='mobile', category='best-practices'): if fails == 0: points = 5 - review = '* Webbplatsen följer god praxis fullt ut!\n' + review = _('TEXT_REVIEW_PRACTICE_VERY_GOOD') elif fails <= 2: points = 4 - review = '* Webbplatsen har ändå förbättrings­potential.\n' + review = _('TEXT_REVIEW_PRACTICE_IS_GOOD') elif fails <= 3: points = 3 - review = '* Genomsnittlig efterlevnad till praxis.\n' + review = _('TEXT_REVIEW_PRACTICE_IS_OK') elif fails <= 4: points = 2 - review = '* Webbplatsen är ganska dålig på att följa god praxis.\n' + review = _('TEXT_REVIEW_PRACTICE_IS_BAD') elif fails > 4: points = 1 - review = '* Webbplatsen är inte alls bra på att följa praxis!\n' + review = _('TEXT_REVIEW_PRACTICE_IS_VERY_BAD') - review += '* Antal problem med god praxis: {} st\n'.format(fails) + review += _('TEXT_REVIEW_PRACTICE_NUMBER_OF_PROBLEMS').format(fails) if fails != 0: - review += '\nProblem:\n' + review += _('TEXT_REVIEW_PRACTICE_PROBLEMS') for key, value in return_dict.items(): if value == 0: From ac5c5b74cfb5b98e62f34d975801be36205b9608 Mon Sep 17 00:00:00 2001 From: Mattias Date: Sun, 20 Dec 2020 22:04:07 +0100 Subject: [PATCH 34/49] enable skip and take on all engines ( #17 )
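example use (the input file name is illustrative): python3 default.py -t 9 -r -i sites.csv --input-skip 10 --input-take 20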
sys.exit(2) @@ -267,13 +269,26 @@ def main(argv): if file_long_ending == ".sqlite": from engines.sqlite import read_sites, add_site, delete_site - if (file_ending == ".csv"): + elif (file_ending == ".csv"): from engines.csv import read_sites, add_site, delete_site - if (file_ending == ".xml"): # https://example.com/sitemap.xml + elif (file_ending == ".xml"): # https://example.com/sitemap.xml from engines.sitemap import read_sites, add_site, delete_site else: from engines.json import read_sites, add_site, delete_site - sites = read_sites(input_filename) + pass + elif opt in ("--input-skip"): # specifies number of items to skip in the begining + try: + input_skip = int(arg) + except Exception: + print(_('TEXT_COMMAND_USAGE')) + sys.exit(2) + pass + elif opt in ("--input-take"): # specifies number of items to take + try: + input_take = int(arg) + except Exception: + print(_('TEXT_COMMAND_USAGE')) + sys.exit(2) pass elif opt in ("-o", "--output"): # output file path output_filename = arg @@ -286,6 +301,9 @@ def main(argv): print(_('TEXT_COMMAND_USAGE')) sys.exit(2) + if (input_filename != ''): + sites = read_sites(input_filename, input_skip, input_take) + if (add_url != ''): # check if website url should be added sites = add_site(input_filename, add_url) diff --git a/engines/csv.py b/engines/csv.py index be874ca1..452e9fca 100644 --- a/engines/csv.py +++ b/engines/csv.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- from models import Sites, SiteTests +from engines.utils import use_website import csv @@ -12,7 +13,7 @@ def write_tests(output_filename, siteTests): def add_site(input_filename, url): - sites = read_sites(input_filename) + sites = read_sites(input_filename, 0, -1) # print(sites) id = len(sites) sites.append([id, url]) @@ -24,7 +25,7 @@ def add_site(input_filename, url): def delete_site(input_filename, url): - sites = read_sites(input_filename) + sites = read_sites(input_filename, 0, -1) tmpSites = list() for site in sites: site_id = site[0] @@ -39,30 +40,33 @@ def delete_site(input_filename, url): return tmpSites -def read_sites(input_filename): +def read_sites(input_filename, input_skip, input_take): sites = list() with open(input_filename, newline='') as csvfile: dialect = csv.Sniffer().sniff(csvfile.read(1024)) csvfile.seek(0) - reader = csv.reader(csvfile, dialect) + reader = csv.reader(csvfile, dialect) with open(input_filename, newline='') as csvfile: csv_reader = csv.reader(csvfile, delimiter=',', quotechar='|') - current_siteid = 0 + current_index = 0 for row in csv_reader: number_of_fields = len(Sites.fieldnames()) current_number_of_fields = len(row) if number_of_fields == current_number_of_fields: # ignore first row as that is our header info - if current_siteid != 0: + if current_index != 0 and use_website(current_index + 1, input_skip, input_take): sites.append([row[0], row[1]]) elif current_number_of_fields == 1: # we have no header and only one colmn, use column as website url - sites.append([current_siteid, "".join(row)]) - current_siteid += 1 + if use_website(current_index, input_skip, input_take): + sites.append([current_index, "".join(row)]) + current_index += 1 + return sites + def write_sites(output_filename, sites): sites_output = list() for site in sites: @@ -71,7 +75,7 @@ def write_sites(output_filename, sites): site_object = Sites(id=site_id, website=site_url).todata() sites_output.append(site_object) - with open(output_filename, 'w', newline='') as csvfile: + with open("output-" + output_filename, 'w', newline='') as csvfile: writer = csv.DictWriter(csvfile, 
fieldnames=Sites.fieldnames()) writer.writeheader() diff --git a/engines/json.py b/engines/json.py index a18bfb35..71430fa6 100644 --- a/engines/json.py +++ b/engines/json.py @@ -1,9 +1,11 @@ -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- +from engines.utils import use_website import json + def add_site(input_filename, url): - sites = read_sites(input_filename) - #print(sites) + sites = read_sites(input_filename, 0, -1) + # print(sites) id = len(sites) sites.append([id, url]) write_sites(input_filename, sites) @@ -12,29 +14,38 @@ def add_site(input_filename, url): return sites + def delete_site(input_filename, url): - sites = read_sites(input_filename) + sites = read_sites(input_filename, 0, -1) tmpSites = list() for site in sites: site_id = site[0] site_url = site[1] if (url != site_url): tmpSites.append([site_id, site_url]) - + write_sites(input_filename, tmpSites) print(_('TEXT_WEBSITE_URL_DELETED').format(site_url)) return tmpSites -def read_sites(input_filename): + +def read_sites(input_filename, input_skip, input_take): + + print('A') + sites = list() with open(input_filename) as json_input_file: data = json.load(json_input_file) + current_index = 0 for site in data["sites"]: - sites.append([site["id"], site["url"]]) + if use_website(current_index, input_skip, input_take): + sites.append([site["id"], site["url"]]) + current_index += 1 return sites + def write_tests(output_filename, siteTests): with open(output_filename, 'w') as outfile: # json require us to have an object as root element @@ -43,18 +54,20 @@ def write_tests(output_filename, siteTests): } json.dump(testsContainerObject, outfile) + def write_sites(output_filename, sites): with open(output_filename, 'w') as outfile: # json require us to have an object as root element jsonSites = list() + current_siteid = 0 for site in sites: jsonSites.append({ - 'id': site[0], - 'url': site[1] - }) + 'id': site[0], + 'url': site[1] + }) + current_siteid += 1 sitesContainerObject = { "sites": jsonSites } json.dump(sitesContainerObject, outfile) - diff --git a/engines/sitemap.py b/engines/sitemap.py index 0012d832..39373ead 100644 --- a/engines/sitemap.py +++ b/engines/sitemap.py @@ -1,11 +1,12 @@ # -*- coding: utf-8 -*- from models import Sites, SiteTests +from engines.utils import use_website import config from tests.utils import * import re -def read_sites(input_sitemap_url): +def read_sites(input_sitemap_url, input_skip, input_take): sites = list() sitemap_content = httpRequestGetContent(input_sitemap_url) @@ -13,20 +14,21 @@ def read_sites(input_sitemap_url): regex = r"(?P[^<]+)<" matches = re.finditer(regex, sitemap_content, re.MULTILINE) - current_siteid = 0 + current_index = 0 for matchNum, match in enumerate(matches, start=1): item_url = match.group('itemurl') - sites.append([current_siteid, item_url]) - current_siteid += 1 + if use_website(current_index, input_skip, input_take): + sites.append([current_index, item_url]) + current_index += 1 return sites def add_site(input_filename, url): print("WARNING: sitemap engine is a read only method for testing all pages in a sitemap.xml, NO changes will be made") - sites = read_sites(input_filename) + sites = read_sites(input_filename, 0, -1) return sites @@ -34,6 +36,6 @@ def add_site(input_filename, url): def delete_site(input_filename, url): print("WARNING: sitemap engine is a read only method for testing all pages in a sitemap.xml, NO changes will be made") - sites = read_sites(input_filename) + sites = read_sites(input_filename, 0, -1) return sites diff --git a/engines/sql.py 
b/engines/sql.py index 4df0131f..33275702 100644 --- a/engines/sql.py +++ b/engines/sql.py @@ -1,10 +1,16 @@ -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- +from engines.utils import use_website -def write_tests(output_filename, siteTests): + +def write_tests(output_filename, siteTests, input_skip, input_take): with open(output_filename, 'w') as outfile: + current_index = 0 for test in siteTests: - format_str = """INSERT INTO sitetests (site_id, test_date, type_of_test, check_report, json_check_data, most_recent, rating) - VALUES ("{siteid}", "{testdate}", "{testtype}", "{report}", "{json}", "{recent}", "{rating}");\n""" - sql_command = format_str.format(siteid=test["site_id"], testdate=test["date"], testtype=test["type_of_test"], report=test["report"], json=test["data"], recent=1, rating=test["rating"]) - - outfile.write(sql_command) \ No newline at end of file + if use_website(current_index, input_skip, input_take): + format_str = """INSERT INTO sitetests (site_id, test_date, type_of_test, check_report, json_check_data, most_recent, rating) + VALUES ("{siteid}", "{testdate}", "{testtype}", "{report}", "{json}", "{recent}", "{rating}");\n""" + sql_command = format_str.format(siteid=test["site_id"], testdate=test["date"], testtype=test["type_of_test"], + report=test["report"], json=test["data"], recent=1, rating=test["rating"]) + + current_index += 1 + outfile.write(sql_command) diff --git a/engines/sqlite.py b/engines/sqlite.py index 74ba65db..1a7f3e8c 100644 --- a/engines/sqlite.py +++ b/engines/sqlite.py @@ -1,6 +1,8 @@ -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- +from engines.utils import use_website import sqlite3 + def db_tables(output_filename): conn = sqlite3.connect(output_filename) c = conn.cursor() @@ -12,6 +14,7 @@ def db_tables(output_filename): conn.close() + def add_site(input_filename, url): conn = sqlite3.connect(input_filename) c = conn.cursor() @@ -28,6 +31,7 @@ def add_site(input_filename, url): return read_sites(input_filename) + def delete_site(input_filename, url): conn = sqlite3.connect(input_filename) c = conn.cursor() @@ -39,22 +43,27 @@ def delete_site(input_filename, url): conn.close() - print(_('TEXT_WEBSITE_URL_DELETED').format(site_url)) + print(_('TEXT_WEBSITE_URL_DELETED').format(url)) - return read_sites(input_filename) + return read_sites(input_filename, 0, -1) -def read_sites(input_filename): + +def read_sites(input_filename, input_skip, input_take): sites = list() - order_by='title ASC' + order_by = 'title ASC' conn = sqlite3.connect(input_filename) c = conn.cursor() + current_index = 0 for row in c.execute('SELECT id, website FROM sites WHERE active=1 ORDER BY {0}'.format(order_by)): - sites.append([row[0], row[1]]) + if use_website(current_index, input_skip, input_take): + sites.append([row[0], row[1]]) + current_index += 1 conn.close() return sites + def write_tests(output_filename, siteTests): conn = sqlite3.connect(output_filename) c = conn.cursor() @@ -62,7 +71,8 @@ def write_tests(output_filename, siteTests): for test in siteTests: # set previous testresult as not latest format_str = """UPDATE sitetests SET most_recent=0 WHERE site_id="{siteid}" AND type_of_test="{testtype}" AND most_recent=1;\n""" - sql_command = format_str.format(siteid=test["site_id"], testtype=test["type_of_test"]) + sql_command = format_str.format( + siteid=test["site_id"], testtype=test["type_of_test"]) c.execute(sql_command) conn.commit() @@ -70,9 +80,10 @@ def write_tests(output_filename, siteTests): # update testresult for all sites format_str = """INSERT INTO 
sitetests (site_id, test_date, type_of_test, check_report, json_check_data, most_recent, rating) VALUES ("{siteid}", "{testdate}", "{testtype}", "{report}", "{json}", "{recent}", "{rating}");\n""" - sql_command = format_str.format(siteid=test["site_id"], testdate=test["date"], testtype=test["type_of_test"], report=test["report"], json=test["data"], recent=1, rating=test["rating"]) + sql_command = format_str.format(siteid=test["site_id"], testdate=test["date"], testtype=test["type_of_test"], + report=test["report"], json=test["data"], recent=1, rating=test["rating"]) c.execute(sql_command) conn.commit() - conn.close() \ No newline at end of file + conn.close() diff --git a/engines/utils.py b/engines/utils.py new file mode 100644 index 00000000..4edc9cb5 --- /dev/null +++ b/engines/utils.py @@ -0,0 +1,11 @@ +# -*- coding: utf-8 -*- + + +def use_website(current_index, skip, take): + if skip > 0 and current_index < skip: + return False + + if take != -1 and current_index >= (skip + take): + return False + + return True From 42c09cb282a9a086a90e99b3f2037827929235ad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marcus=20=C3=96sterberg?= Date: Sun, 20 Dec 2020 22:16:56 +0100 Subject: [PATCH 35/49] Lang files for PWA-test --- locales/en/LC_MESSAGES/pwa_lighthouse.po | 36 ++++++++++++++++++++++++ locales/sv/LC_MESSAGES/pwa_lighthouse.po | 36 ++++++++++++++++++++++++ tests/pwa_lighthouse.py | 14 ++++----- 3 files changed, 79 insertions(+), 7 deletions(-) create mode 100644 locales/en/LC_MESSAGES/pwa_lighthouse.po create mode 100644 locales/sv/LC_MESSAGES/pwa_lighthouse.po diff --git a/locales/en/LC_MESSAGES/pwa_lighthouse.po b/locales/en/LC_MESSAGES/pwa_lighthouse.po new file mode 100644 index 00000000..ac12b100 --- /dev/null +++ b/locales/en/LC_MESSAGES/pwa_lighthouse.po @@ -0,0 +1,36 @@ +# English +# Copyright (C) 2020 WebPerf +# FIRST AUTHOR , 2020. +# +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"POT-Creation-Date: 2020-12-12 10:45+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: Marcus \n" +"Language-Team: English \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" + +msgid "TEXT_REVIEW_PWA_VERY_GOOD" +msgstr "* The website is all out progressive web app!\n" + +msgid "TEXT_REVIEW_PWA_IS_GOOD" +msgstr "* Pretty good on the requirements of progressive web apps!\n" + +msgid "TEXT_REVIEW_PWA_IS_OK" +msgstr "* About average support of Progressive Web App technology!\n" + +msgid "TEXT_REVIEW_PWA_IS_BAD" +msgstr "* Pretty bad as a progressive web app (PWA)!\n" + +msgid "TEXT_REVIEW_PWA_IS_VERY_BAD" +msgstr "* Really bad as a progressive web app (PWA) :/\n" + +msgid "TEXT_REVIEW_PWA_NUMBER_OF_PROBLEMS" +msgstr "* Number of problems regarding progressive web apps: {} st\n" + +msgid "TEXT_REVIEW_PWA_PROBLEMS" +msgstr "\nProblem(s):\n" \ No newline at end of file diff --git a/locales/sv/LC_MESSAGES/pwa_lighthouse.po b/locales/sv/LC_MESSAGES/pwa_lighthouse.po new file mode 100644 index 00000000..24940144 --- /dev/null +++ b/locales/sv/LC_MESSAGES/pwa_lighthouse.po @@ -0,0 +1,36 @@ +# Swedish +# Copyright (C) 2020 WebPerf +# FIRST AUTHOR , 2020. 
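The new engines/utils.py above is the entire windowing mechanism behind --input-skip/--input-take: each engine's read_sites counts rows and asks use_website whether the current index falls inside the window, with take == -1 meaning "no upper limit". A small usage sketch against an invented URL list:

from engines.utils import use_website

urls = ['a.example', 'b.example', 'c.example', 'd.example', 'e.example']

# Skip the first two entries, then take at most two.
window = [url for index, url in enumerate(urls)
          if use_website(index, 2, 2)]
print(window)  # ['c.example', 'd.example']

# take == -1 disables the upper bound: everything after the skip is kept.
print([url for index, url in enumerate(urls) if use_website(index, 3, -1)])
# ['d.example', 'e.example']

Keeping the predicate in one module is what lets the csv, json, sitemap, sql and sqlite engines stay consistent instead of each re-implementing the bounds check.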
+# +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"POT-Creation-Date: 2020-12-12 10:45+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: Marcus \n" +"Language-Team: Swedish \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" + +msgid "TEXT_REVIEW_PWA_VERY_GOOD" +msgstr "* Webbplatsen följer fullt ut praxis för progressiva webbappar!\n" + +msgid "TEXT_REVIEW_PWA_IS_GOOD" +msgstr "* Webbplatsen har lite förbättrings­potential för en progressiv webbapp.\n" + +msgid "TEXT_REVIEW_PWA_IS_OK" +msgstr "* Genomsnittlig efterlevnad till praxis för progressiva webbappar.\n" + +msgid "TEXT_REVIEW_PWA_IS_BAD" +msgstr "* Webbplatsen är ganska dålig som progressiv webbapp.\n" + +msgid "TEXT_REVIEW_PWA_IS_VERY_BAD" +msgstr "* Webbplatsen är inte alls bra som progressiv webbapp :/\n" + +msgid "TEXT_REVIEW_PWA_NUMBER_OF_PROBLEMS" +msgstr "* Antal problem med praxis för progressiva webbappar: {} st\n" + +msgid "TEXT_REVIEW_PWA_PROBLEMS" +msgstr "\nProblem:\n" \ No newline at end of file diff --git a/tests/pwa_lighthouse.py b/tests/pwa_lighthouse.py index 5aaaa508..ed96d346 100644 --- a/tests/pwa_lighthouse.py +++ b/tests/pwa_lighthouse.py @@ -63,25 +63,25 @@ def run_test(langCode, url, strategy='mobile', category='pwa'): if fails == 0: points = 5 - #review = '* Webbplatsen följer fullt ut praxis för progressiva webbappar!\n' + review = _('TEXT_REVIEW_PRACTICE_VERY_GOOD') elif fails <= 4: points = 4 - #review = '* Webbplatsen har lite förbättrings­potential för en progressiv webbapp.\n' + review = _('TEXT_REVIEW_PWA_IS_GOOD') elif fails <= 7: points = 3 - #review = '* Genomsnittlig efterlevnad till praxis för progressiva webbappar.\n' + review = _('TEXT_REVIEW_PWA_IS_OK') elif fails <= 9: points = 2 - #review = '* Webbplatsen är ganska dålig som progressiv webbapp.\n' + review = _('TEXT_REVIEW_PWA_IS_BAD') elif fails > 9: points = 1 - #review = '* Webbplatsen är inte alls bra som progressiv webbapp :/\n' + review = _('TEXT_REVIEW_PWA_IS_VERY_BAD') - review += '* Antal problem med praxis för progressiva webbappar: {} st\n'.format(fails) + review += _('TEXT_REVIEW_PWA_NUMBER_OF_PROBLEMS').format(fails) if fails != 0: - review += '\nProblem:\n' + review += _('TEXT_REVIEW_PWA_PROBLEMS') for key, value in return_dict.items(): if value == 0: From fe0b754a67ea3a1e6a8efb01247fa29d27a883f0 Mon Sep 17 00:00:00 2001 From: Mattias Date: Sun, 20 Dec 2020 22:18:47 +0100 Subject: [PATCH 36/49] generated translations --- locales/en/LC_MESSAGES/a11y_lighthouse.mo | Bin 0 -> 551 bytes .../en/LC_MESSAGES/best_practice_lighthouse.mo | Bin 0 -> 1068 bytes locales/sv/LC_MESSAGES/a11y_lighthouse.mo | Bin 0 -> 966 bytes .../sv/LC_MESSAGES/best_practice_lighthouse.mo | Bin 0 -> 1002 bytes 4 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 locales/en/LC_MESSAGES/a11y_lighthouse.mo create mode 100644 locales/en/LC_MESSAGES/best_practice_lighthouse.mo create mode 100644 locales/sv/LC_MESSAGES/a11y_lighthouse.mo create mode 100644 locales/sv/LC_MESSAGES/best_practice_lighthouse.mo diff --git a/locales/en/LC_MESSAGES/a11y_lighthouse.mo b/locales/en/LC_MESSAGES/a11y_lighthouse.mo new file mode 100644 index 0000000000000000000000000000000000000000..60250c389b036721106bca01fa40b2e2bcba678d GIT binary patch literal 551 zcmZXQ!EVz)5QdH5mPSdy}71ea-;#77^~1$XDdSe9#{Oe1WoLr8?K4hGKlZ*Q^eP;Whh=Y*;HAZlbe 
zihT%N)r~YeII~pV1Cu!6cjE`1?@n4}QPCxRN7GvTVqbNQkm@y5JE0BQH`n=_st)0_l-8Z0LQOB;uyG$k*Vkx=W?t7r;GMY63s@ZXC2DB5(CM~paBtvN q+8v0hLStkp8)@n2*KY%y@9_QxNCTqlgr;Ge9_W)BvQliGxy~N|m7pB} literal 0 HcmV?d00001 diff --git a/locales/en/LC_MESSAGES/best_practice_lighthouse.mo b/locales/en/LC_MESSAGES/best_practice_lighthouse.mo new file mode 100644 index 0000000000000000000000000000000000000000..47ea31c69e69a846fbf3673f433bcb70621fd2f4 GIT binary patch literal 1068 zcmaJ<%Wl&^6g7_pio~W%Ub?J!R1Qg@qB5nzN!?&Ij-%8qEvgia?HhZLGh@w68Y0AJ zu!9x3Q`RI7&o_p>Y+dr1Bez54eMz}>dtQP-VAL;of;S-^% ze5^sLCU%JL5bqOj5T*o|)?Y4L)>Go&#LL8uE0(oId`SG5_%rc! z;_s*XKZ&i#+mD6=Z_oE$4f_MP9rkS&#QHrHg=?# z&;>J6IS??FOf;;IUraGBQH|;vH%KP33Cqxqkc}PiL{@N}!@8m6oG|bM?<{_^-l(;u zFeqrqtfaIC-=R zKsn3MRA%*Z2F?0v&3Xt?jzFO`HwauqsYo${n57^EOmk+c-9!q_Q&gBjCS^)0mKdI3 z{k%iM1ZcyH0{FO8@(4l8F;-SFYBS&G#&TkS9Z|sSSTd4DA*U36e#rS5l!rcMQT$EG z(i~OQX5@S)jHCi{a=^QlO>Z)eQGpy$5*ZgbzONsUS;kcL&gPpPc=x_$)#wL16*?CL fnR1hJG2aL*N#uzk1?otd6$o9P&f^Q)g_=Oj;tu z4<(hVhj6E3rlVleo}Z*9>Ew_%(5j_ye)w zJ1_ly+vzr)Q(-~N-r2Vs4v?zT^bSic&u2Rlxu#pT%Qae1tyC+_tkMT& z#d@-_PWuW8SsPE|!sU;y=h$uLdTiH*7oO!d*I#*uPU(Ov&3ws)O1Ms?1umC^iGr>1 z^D_)Gl*6*ZZIX#-!Xsop^05V-GYn%jf-OzkOG3BlIv4THa;3DB3XOuAOfyQW@vVNG zB|H{27>u}7sGlAAdu;PUSA-d&WR4i5VJspGHiKA~8YocG#38FsEy$)3>cXs?O~EX0 zl#GXPj6p!wTA_e^CLzq{NgP3z&q2pY66G@y(ZL8c4D)jd!-+U49#TC}qiOAw%a|I3 z2sE`ysabj1^n?otk{HG)FF8J9tS}Ao znbr~)rQLj%gmI+*j<8gGuLx!f%@fFE8YDQT0O4{A3*Nm4rI&}GM+bUU4wEcH5pc<2 NhzT8~<7Efx^9#KZCcyvz literal 0 HcmV?d00001 diff --git a/locales/sv/LC_MESSAGES/best_practice_lighthouse.mo b/locales/sv/LC_MESSAGES/best_practice_lighthouse.mo new file mode 100644 index 0000000000000000000000000000000000000000..5e41fb273bacd0440c13a33214ca955527a866f5 GIT binary patch literal 1002 zcmaJ<&2G~`5H>#t8YC}>{QMo_DN!?<#uA{{LLrBp&p13ZXwY9rx8zJ6+ zD>t~cPk>a33zxhDN8SORfN_dwsR%27`gSxk-+Z(3_o>?-EIM}x_X)4d;L7<(_j`m- zgfjnyaF_6n0E8ce$AqaHmi2(xA$~}_N4!EfCM;5Xe$%p^6aOTBLOgTpB94fsDSl6E z6MrV2CH_imMc!W2X?Z)ox7%s9+(zU#ypG@Q)ZNvIx%DudoD4T6_U(AB{mH^x+dWwkl_pjLIB&CgO?r9#%iqpVcHYzOdr6Kq42N5zz@7j`?X5d zZsgLSq#?tC+!{PFPYb~_xd`0@t~8qE?P!fHT-KFldZ?HuyLpny)PaRKGxj=4RJ3uz z>O%*LVTz^{s}w`1RpxDL26i!yN!n>aEDHNkK;Ko32bS@4#qc0S!`Y1F2 literal 0 HcmV?d00001 From 175ed7f222caad93a1cc4dfabe7bf65bacd4456c Mon Sep 17 00:00:00 2001 From: Mattias Date: Sun, 20 Dec 2020 22:20:01 +0100 Subject: [PATCH 37/49] generated translations for PW --- locales/en/LC_MESSAGES/pwa_lighthouse.mo | Bin 0 -> 994 bytes locales/sv/LC_MESSAGES/pwa_lighthouse.mo | Bin 0 -> 1065 bytes 2 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 locales/en/LC_MESSAGES/pwa_lighthouse.mo create mode 100644 locales/sv/LC_MESSAGES/pwa_lighthouse.mo diff --git a/locales/en/LC_MESSAGES/pwa_lighthouse.mo b/locales/en/LC_MESSAGES/pwa_lighthouse.mo new file mode 100644 index 0000000000000000000000000000000000000000..f545e385035d46c5043f8a91620a5d46fa146bda GIT binary patch literal 994 zcma)4%Wl&^6g7_pvcv|7mtb+rs->DVsi;)kQsK02iCV{z>$VgjMU&*#F>-7(Gbu!f zuR!cz$rr#6u;&|C@E;(KLq&@)5+ffS-#hnl?zO*Ox%k$?xdhw>o|eX$^A7LVfcL<4 z;1h5a_yXJlz5#cDAHWS@=A32S1Uuk0AOZyJH|H&D1^oGhzk?TG-@ow7pMWpJ{s69l zKZ32udmME`Z@=XobUO!bx3$}Cx*JvQtstoM2isNA`(AihS*g9}H@&bMY<4?g(A@F- z-M`ZRc`+?I)zMM;GwvJF8i;*XI>|GKI_~W?+8jvl_?Ucu_9rg;rvSv;08i-_gOAuo&5L=S1z!2Bc7Z~5M-{6?*AujiTJ3{55l z)Ea(i7K&6R*$VZCQfW31_M%O(d>U7ZIbtQete+2(EOuzQml%7CGgg>*Aes}0ib>3- zELJNf)Tk}l)?Ml-xF*z-1Cp8~6)KMvYn>c3VHlD{L9_TF=4Q@B$9X=WJR>t?Qv7_J zC>}wsru?X?fS%^My?ks)9z#&fq{l^(D^q6R8~$#2fR-*=8txCXJk8_DG&f|KrvIao zoP|$d8DB;ma1G$r9y{Zy(VpJ%Z8dVVOGyMWu*usVh=hoObHg0rc{nm Iu_g%p36#&iFDHGe8nY4`% 
zUxC=sUHJpV4`kavKw`my&p=$aphBCfBOe{l%(>^DJKt~L{Akg-MR-JbQ*QoQpXmQC z;gE2T@P%-Pa7=he_(6C=_(ix+n7C$H4~QM&MM6ZFrTvHNmK72o6E}z--yjWfOuRsR zNNf{-Bd!zwB(@^&Rn%#Dn@w-46K=VkX1h~&8{>{wf?(`9w}UI<0T`b+6S4 zmOEi9sIPf``$GAp7bm7CIYO21V%M-{!ZuN9k!KEs?$WBe;sL4J&0yUQgNQ9DN$7xSX1 zoG+wIAcaBbjlQaQbYzU84Em|w8O{}XIf>x%BAaWNK^afHI7?`U)Cv59C`g=tL literal 0 HcmV?d00001 From 74fc6d05bea060c44a785de1e83db6753cdeffb6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marcus=20=C3=96sterberg?= Date: Sun, 20 Dec 2020 22:27:33 +0100 Subject: [PATCH 38/49] SEO:s langfiles --- locales/en/LC_MESSAGES/seo_lighthouse.po | 36 ++++++++++++++++++++++++ locales/sv/LC_MESSAGES/seo_lighthouse.po | 36 ++++++++++++++++++++++++ tests/seo_lighthouse.py | 16 ++++++----- 3 files changed, 81 insertions(+), 7 deletions(-) create mode 100644 locales/en/LC_MESSAGES/seo_lighthouse.po create mode 100644 locales/sv/LC_MESSAGES/seo_lighthouse.po diff --git a/locales/en/LC_MESSAGES/seo_lighthouse.po b/locales/en/LC_MESSAGES/seo_lighthouse.po new file mode 100644 index 00000000..41313708 --- /dev/null +++ b/locales/en/LC_MESSAGES/seo_lighthouse.po @@ -0,0 +1,36 @@ +# English +# Copyright (C) 2020 WebPerf +# FIRST AUTHOR , 2020. +# +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"POT-Creation-Date: 2020-12-12 10:45+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: Marcus \n" +"Language-Team: English \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" + +msgid "TEXT_REVIEW_SEO_VERY_GOOD" +msgstr "* The website is really good at SEO!!\n" + +msgid "TEXT_REVIEW_SEO_IS_GOOD" +msgstr "* Pretty good at SEO, but still has some potential improvements.\n" + +msgid "TEXT_REVIEW_SEO_IS_OK" +msgstr "* About average on search engine optimisation (SEO) according to Google.\n" + +msgid "TEXT_REVIEW_SEO_IS_BAD" +msgstr "* Pretty bad optimized for search engines (SEO).\n" + +msgid "TEXT_REVIEW_SEO_IS_VERY_BAD" +msgstr "* Really bad at SEO (Search Engine Optimisation)!\n" + +msgid "TEXT_REVIEW_SEO_NUMBER_OF_PROBLEMS" +msgstr "* Number of problem(s) with search engine optmisiation (SEO): {}\n" + +msgid "TEXT_REVIEW_SEO_PROBLEMS" +msgstr "\nProblem(s):\n" \ No newline at end of file diff --git a/locales/sv/LC_MESSAGES/seo_lighthouse.po b/locales/sv/LC_MESSAGES/seo_lighthouse.po new file mode 100644 index 00000000..4b9e9777 --- /dev/null +++ b/locales/sv/LC_MESSAGES/seo_lighthouse.po @@ -0,0 +1,36 @@ +# Swedish +# Copyright (C) 2020 WebPerf +# FIRST AUTHOR , 2020. 
+# +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"POT-Creation-Date: 2020-12-12 10:45+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: Marcus \n" +"Language-Team: Swedish \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" + +msgid "TEXT_REVIEW_SEO_VERY_GOOD" +msgstr "* Webbplatsen följer god SEO-praxis fullt ut!\n" + +msgid "TEXT_REVIEW_SEO_IS_GOOD" +msgstr "* Webbplatsen har ändå förbättrings­potential inom SEO.\n" + +msgid "TEXT_REVIEW_SEO_IS_OK" +msgstr "* Genomsnittlig efterlevnad till SEO-praxis.\n" + +msgid "TEXT_REVIEW_SEO_IS_BAD" +msgstr "* Webbplatsen är ganska dålig på sökmotoroptimering.\n" + +msgid "TEXT_REVIEW_SEO_IS_VERY_BAD" +msgstr "* Webbplatsen är inte alls bra på sökmotoroptimering!\n" + +msgid "TEXT_REVIEW_SEO_NUMBER_OF_PROBLEMS" +msgstr "* Antal problem med sökmotoroptimering (SEO): {} st\n" + +msgid "TEXT_REVIEW_SEO_PROBLEMS" +msgstr "\nProblem:\n" \ No newline at end of file diff --git a/tests/seo_lighthouse.py b/tests/seo_lighthouse.py index 70a47b10..574a8278 100644 --- a/tests/seo_lighthouse.py +++ b/tests/seo_lighthouse.py @@ -10,6 +10,8 @@ from bs4 import BeautifulSoup import config from tests.utils import * +import gettext +_ = gettext.gettext ### DEFAULTS googlePageSpeedApiKey = config.googlePageSpeedApiKey @@ -63,25 +65,25 @@ def run_test(langCode, url, strategy='mobile', category='seo'): if fails == 0: points = 5 - review = '* Webbplatsen följer god SEO-praxis fullt ut!\n' + review = _('TEXT_REVIEW_SEO_VERY_GOOD') elif fails <= 2: points = 4 - review = '* Webbplatsen har ändå förbättrings­potential inom SEO.\n' + review = _('TEXT_REVIEW_SEO_IS_GOOD') elif fails <= 3: points = 3 - review = '* Genomsnittlig efterlevnad till SEO-praxis.\n' + review = _('TEXT_REVIEW_SEO_IS_OK') elif fails <= 4: points = 2 - review = '* Webbplatsen är ganska dålig på sökmotoroptimering.\n' + review = _('TEXT_REVIEW_SEO_IS_BAD') elif fails > 4: points = 1 - review = '* Webbplatsen är inte alls bra på sökmotoroptimering!\n' + review = _('TEXT_REVIEW_SEO_IS_VERY_BAD') - review += '* Antal problem med god praxis: {} st\n'.format(fails) + review += _('TEXT_REVIEW_SEO_NUMBER_OF_PROBLEMS').format(fails) if fails != 0: - review += '\nProblem:\n' + review += _('TEXT_REVIEW_SEO_PROBLEMS') for key, value in return_dict.items(): if value != None and value < 1: From 491ac40e7363c7212660b3686af6a382553a32ed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marcus=20=C3=96sterberg?= Date: Sun, 20 Dec 2020 22:27:37 +0100 Subject: [PATCH 39/49] Update a11y_lighthouse.py --- tests/a11y_lighthouse.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/a11y_lighthouse.py b/tests/a11y_lighthouse.py index 92741c40..dcab72d2 100644 --- a/tests/a11y_lighthouse.py +++ b/tests/a11y_lighthouse.py @@ -10,7 +10,6 @@ from bs4 import BeautifulSoup import config from tests.utils import * - import gettext _ = gettext.gettext From 7bfdc4f679856c4aef39ebc92d2406a05af868db Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marcus=20=C3=96sterberg?= Date: Sun, 20 Dec 2020 22:27:41 +0100 Subject: [PATCH 40/49] Update best_practice_lighthouse.py --- tests/best_practice_lighthouse.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/best_practice_lighthouse.py b/tests/best_practice_lighthouse.py index daa1952c..c1eef41b 100644 --- a/tests/best_practice_lighthouse.py +++ b/tests/best_practice_lighthouse.py @@ -10,6 +10,8 @@ from bs4 import BeautifulSoup import config from tests.utils import * 
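Patches 38-40 only add the module-level half of the gettext wiring; patches 44-47 later add the per-call half. The module-level binding makes _ resolve to gettext.gettext, which returns msgids unchanged, so the TEXT_* lookups survive import even before any catalog is loaded; run_test then rebinds _ to the language-specific catalog. A condensed sketch of the two-step pattern, assuming a compiled catalog exists at locales/<langCode>/LC_MESSAGES/seo_lighthouse.mo:

import gettext

# Module level: _ returns msgids untranslated until a catalog is installed.
_ = gettext.gettext

def run_test(langCode):
    # Per call: load this domain's catalog for the requested language and
    # shadow the module-level _ with the real lookup.
    language = gettext.translation(
        'seo_lighthouse', localedir='locales', languages=[langCode])
    language.install()
    _ = language.gettext
    return _('TEXT_REVIEW_SEO_VERY_GOOD')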
+import gettext +_ = gettext.gettext ### DEFAULTS googlePageSpeedApiKey = config.googlePageSpeedApiKey From a7f253db3550260d816930e1108523a5a7247d74 Mon Sep 17 00:00:00 2001 From: Mattias Date: Sun, 20 Dec 2020 22:33:02 +0100 Subject: [PATCH 41/49] generated seo translations --- locales/en/LC_MESSAGES/seo_lighthouse.mo | Bin 0 -> 1019 bytes locales/sv/LC_MESSAGES/seo_lighthouse.mo | Bin 0 -> 999 bytes 2 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 locales/en/LC_MESSAGES/seo_lighthouse.mo create mode 100644 locales/sv/LC_MESSAGES/seo_lighthouse.mo diff --git a/locales/en/LC_MESSAGES/seo_lighthouse.mo b/locales/en/LC_MESSAGES/seo_lighthouse.mo new file mode 100644 index 0000000000000000000000000000000000000000..13a2d5b47e80c132c537855aa171cbe6e3c9966e GIT binary patch literal 1019 zcmZ`%O>fgc5H%kX$PyPMz66H}2h>s=l88!`DHTrZ2CH=(9Jf$}6s?nqv&i;ZyPKAP zxbY7VH~1I)1vqi!%z+<*7$=pQs$u1)=b3#w`(}54U%mL*pmCXShw!=#=EfKLzfSl{ zxIy?vxJLL%xJ~#&xJ&p;xJjs-GmKlrHt_}_BGf7Vblxx=;_t*B@udrf(I9?KT;@L# zUm^ZMe2@4Su@Sj1qh9Fld+v)~*A04Jx7T#G7NuK3urMF&EK={g;laX6?LEKghP`0B z*9n8>uIqQtEdTGswCL1FM~XMt*OoW1_E9P>avM6%=8n^HfmB^DXq%lNvNk0$O|#Y( z(`ZAZ)~H$a27OSk*^gFND6Wyv3h|JaRgQHBt`k~*U~M_@EU zlxZPt@R{t76|84Zk1#G!4yy_`NR}sKmY@|O%WQD-B;{%Z>zcw-Lf`Y<6Z`dQ&D<2Z z#+){p6y#RpTm7&|8P6X>f5fCh{bVoNw$@H`rI{g0%gy^@!1Kh0wU}$Og*i&wfJeY@;2tn`#j@^$9q=3w051@KxN2GR;2&THzIpBM{w#P3@gCR) ze*sT`zk#j5eI2x$?nd3+Y`0v$U2nB()x}ZU%f3Hy-(MM}-f){+BP*?~do{P&_Ltg? zre9liz1D^FOAk(pp7>~}Y)5nqt4C}@C>>|gp+)g`KQ}*t4-$lY_UqOe8;Pgzwy^xdzEX$0>!0H zxXF}59#@^drrC6UUxc|(-I5k_kjY-3_XGvbkrjRX>Mcv`C3WtZr6hi%2Zx3ejtQaQd3B@u?3D4`K0L*2nma#EKE73JVC1OwV zh)kR$g^cBjAH@1XyTWLU8zP=0nnJ}dGdflntxAK|xyr&sq?C$i)JXIM{hm7X?mcN^ fTQ(k6A$QJCit&;ifqTlLGY+ZSPZC3YgNA Date: Sun, 20 Dec 2020 23:26:03 +0100 Subject: [PATCH 42/49] Update frontend_quality_yellow_lab_tools.po typo --- locales/sv/LC_MESSAGES/frontend_quality_yellow_lab_tools.po | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/locales/sv/LC_MESSAGES/frontend_quality_yellow_lab_tools.po b/locales/sv/LC_MESSAGES/frontend_quality_yellow_lab_tools.po index 0e995cb4..25063814 100644 --- a/locales/sv/LC_MESSAGES/frontend_quality_yellow_lab_tools.po +++ b/locales/sv/LC_MESSAGES/frontend_quality_yellow_lab_tools.po @@ -67,7 +67,7 @@ msgid "TEXT_PAGE_BAD_CSS" msgstr "* Sidans användning av dålig CSS: {}\n" msgid "TEXT_PAGE_FONTS" -msgstr "* Sidans användning av webb typsnitt: {}\n" +msgstr "* Sidans användning av webbtypsnitt: {}\n" msgid "TEXT_SERVER_CONFIG" msgstr "* Server konfiguration: {}\n" \ No newline at end of file From b8f1d1e2cc8f9abab3c3a958055c6a9fd4033ea7 Mon Sep 17 00:00:00 2001 From: Mattias Date: Mon, 21 Dec 2020 08:54:21 +0100 Subject: [PATCH 43/49] fixed msgid for a11y --- locales/en/LC_MESSAGES/a11y_lighthouse.mo | Bin 551 -> 976 bytes locales/en/LC_MESSAGES/a11y_lighthouse.po | 8 ++++---- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/locales/en/LC_MESSAGES/a11y_lighthouse.mo b/locales/en/LC_MESSAGES/a11y_lighthouse.mo index 60250c389b036721106bca01fa40b2e2bcba678d..14388993ac4be2c228820864884f74ab92c8cc14 100644 GIT binary patch delta 525 zcmZvX!AiqG5QaBeD0uWxMLbOLCZ!m>h+v^=kkUh;hH6h?lZ;I;CNaCQcb0yB zr)ipfr*-V?HxK9DA9XtOA3E*1^*yWG|2OQRuZ@u%Fb&yNp=JsdUFJpLN}r@A>3OVF z=!Q|KuSv!}soXnPbe3e~hf2B;`;>B$MqFJIn>0JilQFqW7fHs`IT6>$&2{0ENgZ%T zgS{m2r@epuP$jQ8lY^hGnBk@q8nDJ>tvRbtJ`8j6Y-~4vf8Zw+Cz=8@Vqff>rZVHW Tuv9rK8iqQUuJiq^D%`&S`;d+J delta 93 zcmcb>zMRG9o)F7a1|VPtVi_Pd0dbIk50I@4#J)f*1;m9wY{$sJ&;z7(fcP|!tp}uk T0O^e%8yP1jG8;}dW7Y%!=c)`H diff --git 
a/locales/en/LC_MESSAGES/a11y_lighthouse.po b/locales/en/LC_MESSAGES/a11y_lighthouse.po index 0e3ee509..9cc2c836 100644 --- a/locales/en/LC_MESSAGES/a11y_lighthouse.po +++ b/locales/en/LC_MESSAGES/a11y_lighthouse.po @@ -17,16 +17,16 @@ msgstr "" msgid "TEXT_REVIEW_A11Y_VERY_GOOD" msgstr "* The website do not have any apparent issues with accessibility!!\n" -msgid "TEXT_REVIEW_A11Y_VERY_GOOD" +msgid "TEXT_REVIEW_A11Y_IS_GOOD" msgstr "* The website can be more accessible, but is rather good!\n" -msgid "TEXT_REVIEW_A11Y_VERY_GOOD" +msgid "TEXT_REVIEW_A11Y_IS_OK" msgstr "* The accessibility is average, but need to get better.\n" -msgid "TEXT_REVIEW_A11Y_VERY_GOOD" +msgid "TEXT_REVIEW_A11Y_IS_BAD" msgstr "* The website is quite bad at accessibility and sucks for disabled people!\n" -msgid "TEXT_REVIEW_A11Y_VERY_GOOD" +msgid "TEXT_REVIEW_A11Y_IS_VERY_BAD" msgstr "* The accessibility is apparently really bad!\n" msgid "TEXT_REVIEW_A11Y_NUMBER_OF_PROBLEMS" From fd4c3d8963cffac43c59761e35565ca0e6ed7678 Mon Sep 17 00:00:00 2001 From: Mattias Date: Mon, 21 Dec 2020 09:00:04 +0100 Subject: [PATCH 44/49] fixed translation for a11y --- tests/a11y_lighthouse.py | 155 +++++++++++++++++++++------------------ 1 file changed, 82 insertions(+), 73 deletions(-) diff --git a/tests/a11y_lighthouse.py b/tests/a11y_lighthouse.py index dcab72d2..6bce7113 100644 --- a/tests/a11y_lighthouse.py +++ b/tests/a11y_lighthouse.py @@ -1,10 +1,10 @@ -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import socket import ssl import json import requests -import urllib # https://docs.python.org/3/library/urllib.parse.html +import urllib # https://docs.python.org/3/library/urllib.parse.html import uuid import re from bs4 import BeautifulSoup @@ -13,77 +13,86 @@ import gettext _ = gettext.gettext -### DEFAULTS +# DEFAULTS googlePageSpeedApiKey = config.googlePageSpeedApiKey + def run_test(langCode, url, strategy='mobile', category='accessibility'): - check_url = url.strip() - - pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&key={2}'.format(category, check_url, googlePageSpeedApiKey) - - get_content = '' - - try: - get_content = httpRequestGetContent(pagespeed_api_request) - except: # breaking and hoping for more luck with the next URL - print( - 'Error! Unfortunately the request for URL "{0}" failed, message:\n{1}'.format( - check_url, sys.exc_info()[0])) - pass - - json_content = '' - - try: - json_content = json.loads(get_content) - except: # might crash if checked resource is not a webpage - print('Error! 
JSON failed parsing for the URL "{0}"\nMessage:\n{1}'.format( - check_url, sys.exc_info()[0])) - pass - - return_dict = {} - - score = 0 - fails = 0 - fail_dict = {} - - for item in json_content['lighthouseResult']['audits'].keys(): - try: - return_dict[item] = json_content['lighthouseResult']['audits'][item]['score'] - - score = score + int(json_content['lighthouseResult']['audits'][item]['score']) - - if int(json_content['lighthouseResult']['audits'][item]['score']) == 0: - fails += 1 - fail_dict[item] = json_content['lighthouseResult']['audits'][item]['title'] - except: - # has no 'numericValue' - #print(item, 'har inget värde') - pass - - review = '' - points = 0 - - if fails == 0: - points = 5 - review = _('TEXT_REVIEW_A11Y_VERY_GOOD') - elif fails <= 2: - points = 4 - review = _('TEXT_REVIEW_A11Y_IS_GOOD') - elif fails <= 3: - points = 3 - review = _('TEXT_REVIEW_A11Y_IS_OK') - elif fails <= 5: - points = 2 - review = _('TEXT_REVIEW_A11Y_IS_BAD') - elif fails > 5: - points = 1 - review = _('TEXT_REVIEW_A11Y_IS_VERY_BAD') - - if fails != 0: - review += _('TEXT_REVIEW_A11Y_IS_VERY_BAD').format(fails) - - for key, value in return_dict.items(): - if value == 0: - review += '* {}\n'.format(fail_dict[key]) - - return (points, review, return_dict) + + language = gettext.translation( + 'a11y_lighthouse', localedir='locales', languages=[langCode]) + language.install() + _ = language.gettext + + check_url = url.strip() + + pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&key={2}'.format( + category, check_url, googlePageSpeedApiKey) + + get_content = '' + + try: + get_content = httpRequestGetContent(pagespeed_api_request) + except: # breaking and hoping for more luck with the next URL + print( + 'Error! Unfortunately the request for URL "{0}" failed, message:\n{1}'.format( + check_url, sys.exc_info()[0])) + pass + + json_content = '' + + try: + json_content = json.loads(get_content) + except: # might crash if checked resource is not a webpage + print('Error! 
JSON failed parsing for the URL "{0}"\nMessage:\n{1}'.format( + check_url, sys.exc_info()[0])) + pass + + return_dict = {} + + score = 0 + fails = 0 + fail_dict = {} + + for item in json_content['lighthouseResult']['audits'].keys(): + try: + return_dict[item] = json_content['lighthouseResult']['audits'][item]['score'] + + score = score + \ + int(json_content['lighthouseResult']['audits'][item]['score']) + + if int(json_content['lighthouseResult']['audits'][item]['score']) == 0: + fails += 1 + fail_dict[item] = json_content['lighthouseResult']['audits'][item]['title'] + except: + # has no 'numericValue' + #print(item, 'har inget värde') + pass + + review = '' + points = 0 + + if fails == 0: + points = 5 + review = _('TEXT_REVIEW_A11Y_VERY_GOOD') + elif fails <= 2: + points = 4 + review = _('TEXT_REVIEW_A11Y_IS_GOOD') + elif fails <= 3: + points = 3 + review = _('TEXT_REVIEW_A11Y_IS_OK') + elif fails <= 5: + points = 2 + review = _('TEXT_REVIEW_A11Y_IS_BAD') + elif fails > 5: + points = 1 + review = _('TEXT_REVIEW_A11Y_IS_VERY_BAD') + + if fails != 0: + review += _('TEXT_REVIEW_A11Y_IS_VERY_BAD').format(fails) + + for key, value in return_dict.items(): + if value == 0: + review += '* {}\n'.format(fail_dict[key]) + + return (points, review, return_dict) From 48f781a9ef8f8afd8aa16e3078c06e42e98b5388 Mon Sep 17 00:00:00 2001 From: Mattias Date: Mon, 21 Dec 2020 09:11:36 +0100 Subject: [PATCH 45/49] fixed seo translation --- tests/seo_lighthouse.py | 163 +++++++++++++++++++++------------------- 1 file changed, 86 insertions(+), 77 deletions(-) diff --git a/tests/seo_lighthouse.py b/tests/seo_lighthouse.py index 574a8278..193e8e60 100644 --- a/tests/seo_lighthouse.py +++ b/tests/seo_lighthouse.py @@ -1,10 +1,10 @@ -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import socket import ssl import json import requests -import urllib # https://docs.python.org/3/library/urllib.parse.html +import urllib # https://docs.python.org/3/library/urllib.parse.html import uuid import re from bs4 import BeautifulSoup @@ -13,81 +13,90 @@ import gettext _ = gettext.gettext -### DEFAULTS +# DEFAULTS googlePageSpeedApiKey = config.googlePageSpeedApiKey + def run_test(langCode, url, strategy='mobile', category='seo'): - check_url = url.strip() - - pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&key={2}'.format(category, check_url, googlePageSpeedApiKey) - - get_content = '' - - try: - get_content = httpRequestGetContent(pagespeed_api_request) - except: # breaking and hoping for more luck with the next URL - print( - 'Error! Unfortunately the request for URL "{0}" failed, message:\n{1}'.format( - check_url, sys.exc_info()[0])) - pass - - json_content = '' - - try: - json_content = json.loads(get_content) - except: # might crash if checked resource is not a webpage - print('Error! 
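One fragility these rewrites carry over unchanged: both try blocks swallow every exception with a bare except and fall through, so when the API request fails, json_content is still the empty string and the later json_content['lighthouseResult'] raises a TypeError anyway. A hedged sketch of a narrower variant (the early-return shape is a suggestion, not what the patch does):

import json

def parse_lighthouse_audits(get_content):
    # Catch the specific error instead of masking everything with a bare except.
    try:
        json_content = json.loads(get_content)
    except json.JSONDecodeError as error:
        print('Error! JSON failed parsing: {0}'.format(error))
        return None
    # .get avoids a KeyError on unexpected but syntactically valid payloads.
    return json_content.get('lighthouseResult', {}).get('audits')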
JSON failed parsing for the URL "{0}"\nMessage:\n{1}'.format( - check_url, sys.exc_info()[0])) - pass - - return_dict = {} - - score = 0 - fails = 0 - fail_dict = {} - - for item in json_content['lighthouseResult']['audits'].keys(): - try: - return_dict[item] = json_content['lighthouseResult']['audits'][item]['score'] - - score = score + int(json_content['lighthouseResult']['audits'][item]['score']) - - if int(json_content['lighthouseResult']['audits'][item]['score']) == 0: - fails += 1 - fail_dict[item] = json_content['lighthouseResult']['audits'][item]['title'] - except: - # has no 'numericValue' - #print(item, 'har inget värde') - pass - - review = '' - points = 0 - - if fails == 0: - points = 5 - review = _('TEXT_REVIEW_SEO_VERY_GOOD') - elif fails <= 2: - points = 4 - review = _('TEXT_REVIEW_SEO_IS_GOOD') - elif fails <= 3: - points = 3 - review = _('TEXT_REVIEW_SEO_IS_OK') - elif fails <= 4: - points = 2 - review = _('TEXT_REVIEW_SEO_IS_BAD') - elif fails > 4: - points = 1 - review = _('TEXT_REVIEW_SEO_IS_VERY_BAD') - - review += _('TEXT_REVIEW_SEO_NUMBER_OF_PROBLEMS').format(fails) - - - if fails != 0: - review += _('TEXT_REVIEW_SEO_PROBLEMS') - - for key, value in return_dict.items(): - if value != None and value < 1: - review += '* {}\n'.format(fail_dict[key]) - #print(key) - - return (points, review, return_dict) + + language = gettext.translation( + 'seo_lighthouse', localedir='locales', languages=[langCode]) + language.install() + _ = language.gettext + + check_url = url.strip() + + pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&key={2}'.format( + category, check_url, googlePageSpeedApiKey) + + get_content = '' + + try: + get_content = httpRequestGetContent(pagespeed_api_request) + except: # breaking and hoping for more luck with the next URL + print( + 'Error! Unfortunately the request for URL "{0}" failed, message:\n{1}'.format( + check_url, sys.exc_info()[0])) + pass + + json_content = '' + + try: + json_content = json.loads(get_content) + except: # might crash if checked resource is not a webpage + print('Error! 
JSON failed parsing for the URL "{0}"\nMessage:\n{1}'.format( + check_url, sys.exc_info()[0])) + pass + + return_dict = {} + + score = 0 + fails = 0 + fail_dict = {} + + for item in json_content['lighthouseResult']['audits'].keys(): + try: + return_dict[item] = json_content['lighthouseResult']['audits'][item]['score'] + + score = score + \ + int(json_content['lighthouseResult'] + ['audits'][item]['score']) + + if int(json_content['lighthouseResult']['audits'][item]['score']) == 0: + fails += 1 + fail_dict[item] = json_content['lighthouseResult']['audits'][item]['title'] + except: + # has no 'numericValue' + # print(item, 'har inget värde') + pass + + review = '' + points = 0 + + if fails == 0: + points = 5 + review = _('TEXT_REVIEW_SEO_VERY_GOOD') + elif fails <= 2: + points = 4 + review = _('TEXT_REVIEW_SEO_IS_GOOD') + elif fails <= 3: + points = 3 + review = _('TEXT_REVIEW_SEO_IS_OK') + elif fails <= 4: + points = 2 + review = _('TEXT_REVIEW_SEO_IS_BAD') + elif fails > 4: + points = 1 + review = _('TEXT_REVIEW_SEO_IS_VERY_BAD') + + review += _('TEXT_REVIEW_SEO_NUMBER_OF_PROBLEMS').format(fails) + + if fails != 0: + review += _('TEXT_REVIEW_SEO_PROBLEMS') + + for key, value in return_dict.items(): + if value != None and value < 1: + review += '* {}\n'.format(fail_dict[key]) + # print(key) + + return (points, review, return_dict) From 5bad2c7e5f0624faf4a120803d288dbc1538bb1b Mon Sep 17 00:00:00 2001 From: Mattias Date: Mon, 21 Dec 2020 09:26:50 +0100 Subject: [PATCH 46/49] fixed translations for best practice --- tests/best_practice_lighthouse.py | 161 ++++++++++++++++-------------- 1 file changed, 84 insertions(+), 77 deletions(-) diff --git a/tests/best_practice_lighthouse.py b/tests/best_practice_lighthouse.py index c1eef41b..6d7f46fa 100644 --- a/tests/best_practice_lighthouse.py +++ b/tests/best_practice_lighthouse.py @@ -1,10 +1,10 @@ -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import socket import ssl import json import requests -import urllib # https://docs.python.org/3/library/urllib.parse.html +import urllib # https://docs.python.org/3/library/urllib.parse.html import uuid import re from bs4 import BeautifulSoup @@ -13,81 +13,88 @@ import gettext _ = gettext.gettext -### DEFAULTS +# DEFAULTS googlePageSpeedApiKey = config.googlePageSpeedApiKey + def run_test(langCode, url, strategy='mobile', category='best-practices'): - check_url = url.strip() - - pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&strategy={2}&key={3}'.format(category, check_url, strategy, googlePageSpeedApiKey) - - get_content = '' - - try: - get_content = httpRequestGetContent(pagespeed_api_request) - except: # breaking and hoping for more luck with the next URL - print( - 'Error! Unfortunately the request for URL "{0}" failed, message:\n{1}'.format( - check_url, sys.exc_info()[0])) - pass - - json_content = '' - - try: - json_content = json.loads(get_content) - except: # might crash if checked resource is not a webpage - print('Error! 
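Patches 44-47 apply the same mechanical rewrite to the a11y, SEO, best-practice and PWA tests: install a per-domain catalog, fetch the PageSpeed result, count zero-score audits, and map the failure count onto 1-5 points. Only the translation domain, the Lighthouse category and the thresholds differ, leaving four near-identical run_test bodies. A sketch of how the shared scoring step could be factored out (the helper name and threshold tuples are illustrative, not part of this series):

def points_from_fails(fails, thresholds):
    """thresholds = (good, ok, bad): max fail counts for 4, 3 and 2 points."""
    good, ok, bad = thresholds
    if fails == 0:
        return 5
    if fails <= good:
        return 4
    if fails <= ok:
        return 3
    if fails <= bad:
        return 2
    return 1

# From the patches: seo and best-practices use (2, 3, 4),
# a11y uses (2, 3, 5) and pwa uses (4, 7, 9).
print(points_from_fails(3, (2, 3, 4)))  # 3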
JSON failed parsing for the URL "{0}"\nMessage:\n{1}'.format( - check_url, sys.exc_info()[0])) - pass - - return_dict = {} - - score = 0 - fails = 0 - fail_dict = {} - - for item in json_content['lighthouseResult']['audits'].keys(): - try: - return_dict[item] = json_content['lighthouseResult']['audits'][item]['score'] - - score = score + int(json_content['lighthouseResult']['audits'][item]['score']) - - if int(json_content['lighthouseResult']['audits'][item]['score']) == 0: - fails += 1 - fail_dict[item] = json_content['lighthouseResult']['audits'][item]['title'] - except: - # has no 'numericValue' - #print(item, 'har inget värde') - pass - - review = '' - points = 0 - - if fails == 0: - points = 5 - review = _('TEXT_REVIEW_PRACTICE_VERY_GOOD') - elif fails <= 2: - points = 4 - review = _('TEXT_REVIEW_PRACTICE_IS_GOOD') - elif fails <= 3: - points = 3 - review = _('TEXT_REVIEW_PRACTICE_IS_OK') - elif fails <= 4: - points = 2 - review = _('TEXT_REVIEW_PRACTICE_IS_BAD') - elif fails > 4: - points = 1 - review = _('TEXT_REVIEW_PRACTICE_IS_VERY_BAD') - - review += _('TEXT_REVIEW_PRACTICE_NUMBER_OF_PROBLEMS').format(fails) - - - if fails != 0: - review += _('TEXT_REVIEW_PRACTICE_PROBLEMS') - - for key, value in return_dict.items(): - if value == 0: - review += '* {}\n'.format(fail_dict[key]) - #print(key) - - return (points, review, return_dict) + language = gettext.translation( + 'best_practice_lighthouse', localedir='locales', languages=[langCode]) + language.install() + _ = language.gettext + + check_url = url.strip() + + pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&strategy={2}&key={3}'.format( + category, check_url, strategy, googlePageSpeedApiKey) + + get_content = '' + + try: + get_content = httpRequestGetContent(pagespeed_api_request) + except: # breaking and hoping for more luck with the next URL + print( + 'Error! Unfortunately the request for URL "{0}" failed, message:\n{1}'.format( + check_url, sys.exc_info()[0])) + pass + + json_content = '' + + try: + json_content = json.loads(get_content) + except: # might crash if checked resource is not a webpage + print('Error! 
JSON failed parsing for the URL "{0}"\nMessage:\n{1}'.format( + check_url, sys.exc_info()[0])) + pass + + return_dict = {} + + score = 0 + fails = 0 + fail_dict = {} + + for item in json_content['lighthouseResult']['audits'].keys(): + try: + return_dict[item] = json_content['lighthouseResult']['audits'][item]['score'] + + score = score + \ + int(json_content['lighthouseResult']['audits'][item]['score']) + + if int(json_content['lighthouseResult']['audits'][item]['score']) == 0: + fails += 1 + fail_dict[item] = json_content['lighthouseResult']['audits'][item]['title'] + except: + # has no 'numericValue' + #print(item, 'har inget värde') + pass + + review = '' + points = 0 + + if fails == 0: + points = 5 + review = _('TEXT_REVIEW_PRACTICE_VERY_GOOD') + elif fails <= 2: + points = 4 + review = _('TEXT_REVIEW_PRACTICE_IS_GOOD') + elif fails <= 3: + points = 3 + review = _('TEXT_REVIEW_PRACTICE_IS_OK') + elif fails <= 4: + points = 2 + review = _('TEXT_REVIEW_PRACTICE_IS_BAD') + elif fails > 4: + points = 1 + review = _('TEXT_REVIEW_PRACTICE_IS_VERY_BAD') + + review += _('TEXT_REVIEW_PRACTICE_NUMBER_OF_PROBLEMS').format(fails) + + if fails != 0: + review += _('TEXT_REVIEW_PRACTICE_PROBLEMS') + + for key, value in return_dict.items(): + if value == 0: + review += '* {}\n'.format(fail_dict[key]) + # print(key) + + return (points, review, return_dict) From fefddca5d44d35848ae3878235fd70be11f9a19b Mon Sep 17 00:00:00 2001 From: Mattias Date: Mon, 21 Dec 2020 09:31:17 +0100 Subject: [PATCH 47/49] fixed translation for pwa --- tests/pwa_lighthouse.py | 163 +++++++++++++++++++++------------------- 1 file changed, 86 insertions(+), 77 deletions(-) diff --git a/tests/pwa_lighthouse.py b/tests/pwa_lighthouse.py index ed96d346..acf6b3b4 100644 --- a/tests/pwa_lighthouse.py +++ b/tests/pwa_lighthouse.py @@ -1,91 +1,100 @@ -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import sys import socket import ssl import json import requests -import urllib # https://docs.python.org/3/library/urllib.parse.html +import urllib # https://docs.python.org/3/library/urllib.parse.html import uuid import re from bs4 import BeautifulSoup import config from tests.utils import * +import gettext +_ = gettext.gettext -### DEFAULTS +# DEFAULTS googlePageSpeedApiKey = config.googlePageSpeedApiKey + def run_test(langCode, url, strategy='mobile', category='pwa'): - check_url = url.strip() - - pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&key={2}'.format(category, check_url, googlePageSpeedApiKey) - - get_content = '' - - try: - get_content = httpRequestGetContent(pagespeed_api_request) - except: # breaking and hoping for more luck with the next URL - print( - 'Error! Unfortunately the request for URL "{0}" failed, message:\n{1}'.format( - check_url, sys.exc_info()[0])) - pass - - json_content = '' - - try: - json_content = json.loads(get_content) - except: # might crash if checked resource is not a webpage - print('Error! 
JSON failed parsing for the URL "{0}"\nMessage:\n{1}'.format( - check_url, sys.exc_info()[0])) - pass - - return_dict = {} - - score = 0 - fails = 0 - fail_dict = {} - - for item in json_content['lighthouseResult']['audits'].keys(): - try: - return_dict[item] = json_content['lighthouseResult']['audits'][item]['score'] - - score = score + int(json_content['lighthouseResult']['audits'][item]['score']) - - if int(json_content['lighthouseResult']['audits'][item]['score']) == 0: - fails += 1 - fail_dict[item] = json_content['lighthouseResult']['audits'][item]['title'] - except: - # has no 'numericValue' - #print(item, 'har inget värde') - pass - - review = '' - points = 0 - - if fails == 0: - points = 5 - review = _('TEXT_REVIEW_PRACTICE_VERY_GOOD') - elif fails <= 4: - points = 4 - review = _('TEXT_REVIEW_PWA_IS_GOOD') - elif fails <= 7: - points = 3 - review = _('TEXT_REVIEW_PWA_IS_OK') - elif fails <= 9: - points = 2 - review = _('TEXT_REVIEW_PWA_IS_BAD') - elif fails > 9: - points = 1 - review = _('TEXT_REVIEW_PWA_IS_VERY_BAD') - - review += _('TEXT_REVIEW_PWA_NUMBER_OF_PROBLEMS').format(fails) - - - if fails != 0: - review += _('TEXT_REVIEW_PWA_PROBLEMS') - - for key, value in return_dict.items(): - if value == 0: - review += '* {}\n'.format(fail_dict[key]) - #print(key) - - return (points, review, return_dict) + language = gettext.translation( + 'pwa_lighthouse', localedir='locales', languages=[langCode]) + language.install() + _ = language.gettext + + check_url = url.strip() + + pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&key={2}'.format( + category, check_url, googlePageSpeedApiKey) + + get_content = '' + + try: + get_content = httpRequestGetContent(pagespeed_api_request) + except: # breaking and hoping for more luck with the next URL + print( + 'Error! Unfortunately the request for URL "{0}" failed, message:\n{1}'.format( + check_url, sys.exc_info()[0])) + pass + + json_content = '' + + try: + json_content = json.loads(get_content) + except: # might crash if checked resource is not a webpage + print('Error! 
JSON failed parsing for the URL "{0}"\nMessage:\n{1}'.format( + check_url, sys.exc_info()[0])) + pass + + return_dict = {} + + score = 0 + fails = 0 + fail_dict = {} + + for item in json_content['lighthouseResult']['audits'].keys(): + try: + return_dict[item] = json_content['lighthouseResult']['audits'][item]['score'] + + score = score + \ + int(json_content['lighthouseResult']['audits'][item]['score']) + + if int(json_content['lighthouseResult']['audits'][item]['score']) == 0: + fails += 1 + fail_dict[item] = json_content['lighthouseResult']['audits'][item]['title'] + except: + # has no 'numericValue' + #print(item, 'har inget värde') + pass + + review = '' + points = 0 + + if fails == 0: + points = 5 + review = _('TEXT_REVIEW_PRACTICE_VERY_GOOD') + elif fails <= 4: + points = 4 + review = _('TEXT_REVIEW_PWA_IS_GOOD') + elif fails <= 7: + points = 3 + review = _('TEXT_REVIEW_PWA_IS_OK') + elif fails <= 9: + points = 2 + review = _('TEXT_REVIEW_PWA_IS_BAD') + elif fails > 9: + points = 1 + review = _('TEXT_REVIEW_PWA_IS_VERY_BAD') + + review += _('TEXT_REVIEW_PWA_NUMBER_OF_PROBLEMS').format(fails) + + if fails != 0: + review += _('TEXT_REVIEW_PWA_PROBLEMS') + + for key, value in return_dict.items(): + if value == 0: + review += '* {}\n'.format(fail_dict[key]) + # print(key) + + return (points, review, return_dict) From d339d90dc992b3935c0be20a74ca8e94a87d1e20 Mon Sep 17 00:00:00 2001 From: Mattias Date: Mon, 21 Dec 2020 17:12:02 +0100 Subject: [PATCH 48/49] moved translations to seperate files, changed display order for test --- default.py | 28 ++++++------ locales/en/LC_MESSAGES/a11y_lighthouse.mo | Bin 976 -> 1095 bytes locales/en/LC_MESSAGES/a11y_lighthouse.po | 3 ++ .../LC_MESSAGES/best_practice_lighthouse.mo | Bin 1068 -> 1186 bytes .../LC_MESSAGES/best_practice_lighthouse.po | 3 ++ locales/en/LC_MESSAGES/pwa_lighthouse.mo | Bin 994 -> 1102 bytes locales/en/LC_MESSAGES/pwa_lighthouse.po | 3 ++ locales/en/LC_MESSAGES/seo_lighthouse.mo | Bin 1019 -> 1127 bytes locales/en/LC_MESSAGES/seo_lighthouse.po | 3 ++ locales/en/LC_MESSAGES/webperf-core.mo | Bin 3338 -> 2757 bytes locales/en/LC_MESSAGES/webperf-core.po | 38 ++++------------- locales/sv/LC_MESSAGES/a11y_lighthouse.mo | Bin 966 -> 1084 bytes locales/sv/LC_MESSAGES/a11y_lighthouse.po | 3 ++ .../LC_MESSAGES/best_practice_lighthouse.mo | Bin 1002 -> 1114 bytes .../LC_MESSAGES/best_practice_lighthouse.po | 3 ++ .../frontend_quality_yellow_lab_tools.mo | Bin 1802 -> 1801 bytes locales/sv/LC_MESSAGES/pwa_lighthouse.mo | Bin 1065 -> 1185 bytes locales/sv/LC_MESSAGES/pwa_lighthouse.po | 3 ++ locales/sv/LC_MESSAGES/seo_lighthouse.mo | Bin 999 -> 1120 bytes locales/sv/LC_MESSAGES/seo_lighthouse.po | 3 ++ locales/sv/LC_MESSAGES/webperf-core.mo | Bin 3469 -> 2973 bytes locales/sv/LC_MESSAGES/webperf-core.po | 40 +++++------------- tests/a11y_lighthouse.py | 2 + tests/best_practice_lighthouse.py | 2 + tests/pwa_lighthouse.py | 2 + tests/seo_lighthouse.py | 2 + 26 files changed, 63 insertions(+), 75 deletions(-) diff --git a/default.py b/default.py index fc3d453d..384c1f1b 100644 --- a/default.py +++ b/default.py @@ -102,40 +102,36 @@ def testing(langCode, sites, test_type=TEST_ALL, show_reviews=False): if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE): tests.extend(testsites( langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE, show_reviews=show_reviews)) - if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_A11Y): - print(_('TEXT_TEST_GOOGLE_LIGHTHOUSE_A11Y')) - tests.extend(testsites( - langCode, sites, 
test_type=TEST_GOOGLE_LIGHTHOUSE_A11Y, show_reviews=show_reviews)) + if (test_type == TEST_ALL or test_type == TEST_PAGE_NOT_FOUND): + tests.extend(testsites(langCode, sites, + test_type=TEST_PAGE_NOT_FOUND, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_SEO): - print(_('TEXT_TEST_GOOGLE_LIGHTHOUSE_SEO')) tests.extend(testsites( langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE_SEO, show_reviews=show_reviews)) - if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_PWA): - print(_('TEXT_TEST_GOOGLE_LIGHTHOUSE_PWA')) - tests.extend(testsites( - langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE_PWA, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE): - print(_('TEXT_TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE')) tests.extend(testsites( langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE, show_reviews=show_reviews)) - if (test_type == TEST_ALL or test_type == TEST_PAGE_NOT_FOUND): - tests.extend(testsites(langCode, sites, - test_type=TEST_PAGE_NOT_FOUND, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_HTML): tests.extend(testsites(langCode, sites, test_type=TEST_HTML, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_CSS): tests.extend(testsites(langCode, sites, test_type=TEST_CSS, show_reviews=show_reviews)) - if (test_type == TEST_ALL or test_type == TEST_WEBBKOLL): - tests.extend(testsites(langCode, sites, - test_type=TEST_WEBBKOLL, show_reviews=show_reviews)) + if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_PWA): + tests.extend(testsites( + langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE_PWA, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_STANDARD_FILES): tests.extend(testsites(langCode, sites, test_type=TEST_STANDARD_FILES, show_reviews=show_reviews)) + if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_A11Y): + tests.extend(testsites( + langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE_A11Y, show_reviews=show_reviews)) if (test_type == TEST_ALL or test_type == TEST_YELLOW_LAB_TOOLS): tests.extend(testsites( langCode, sites, test_type=TEST_YELLOW_LAB_TOOLS, show_reviews=show_reviews)) + if (test_type == TEST_ALL or test_type == TEST_WEBBKOLL): + tests.extend(testsites(langCode, sites, + test_type=TEST_WEBBKOLL, show_reviews=show_reviews)) return tests diff --git a/locales/en/LC_MESSAGES/a11y_lighthouse.mo b/locales/en/LC_MESSAGES/a11y_lighthouse.mo index 14388993ac4be2c228820864884f74ab92c8cc14..ab278885459d3ddf1da5a7b1d305bba413617a05 100644 GIT binary patch delta 271 zcmcb>ew?HJo)F7a1|Z-7Vi_Qg0dbJP93Wd9i01;a1Q2fpVlf~-2gD%tPk~q&h?y7} z7^Hx-8juzQ(vCpd4v2$+*c-^7#Kgd01*Go*X$v4N$;`l@2htJ*Wat0|8iBMkkX{F* zLxA+BiJdD1LtG<5;)6o{{5<{K<3n77LpB>QE?^2(#t*oHO7rqE^U@VcQj1Hh6bua% obQK(vlT(X}Gm|oNGD|8IG~Dy^({oZ4d@|EBN;2|Gi&Hfj0Ay$`<^TWy delta 151 zcmX@kae=-5o)F7a1|VPuVi_O~0dbH(50EVm#JxZ)0mMszSPY2w0WnDZRUlRd;!i*< z1;k>E3=DQaYzV~OKz;!e1A`Ti-UFm9fb@4Dtp}tHm>C#!7#O%1;(-ijpg_;WnJYF2 IGtOrM0OAD^w*UYD diff --git a/locales/en/LC_MESSAGES/a11y_lighthouse.po b/locales/en/LC_MESSAGES/a11y_lighthouse.po index 9cc2c836..deeac26c 100644 --- a/locales/en/LC_MESSAGES/a11y_lighthouse.po +++ b/locales/en/LC_MESSAGES/a11y_lighthouse.po @@ -14,6 +14,9 @@ msgstr "" "Content-Transfer-Encoding: 8bit\n" "Generated-By: pygettext.py 1.5\n" +msgid "TEXT_RUNNING_TEST" +msgstr "###############################\nRunning test: 10 - Accessibility (Google Lighthouse)" + msgid "TEXT_REVIEW_A11Y_VERY_GOOD" 
msgstr "* The website do not have any apparent issues with accessibility!!\n" diff --git a/locales/en/LC_MESSAGES/best_practice_lighthouse.mo b/locales/en/LC_MESSAGES/best_practice_lighthouse.mo index 47ea31c69e69a846fbf3673f433bcb70621fd2f4..60d1e95d25480280f43127d48a1311ae3af9d231 100644 GIT binary patch delta 293 zcmZ3(v52$&o)F7a1|Z-BVi_P#0dbJP8X#L5h}Qx!Nd6=cO9AmSAXWfkMn(n(bs((- zq-BA$7m$_%(iuQn5JfR4~LfA|yU2)X&e;&pkfGH8^B*8RIsl5M}&; zE2uOtFEcM)p(M4q#7eE$WjO>N=z=vOioqMaL>|+7|eSZ~+ diff --git a/locales/en/LC_MESSAGES/pwa_lighthouse.po b/locales/en/LC_MESSAGES/pwa_lighthouse.po index ac12b100..488f26b5 100644 --- a/locales/en/LC_MESSAGES/pwa_lighthouse.po +++ b/locales/en/LC_MESSAGES/pwa_lighthouse.po @@ -14,6 +14,9 @@ msgstr "" "Content-Transfer-Encoding: 8bit\n" "Generated-By: pygettext.py 1.5\n" +msgid "TEXT_RUNNING_TEST" +msgstr "###############################\nRunning test: 8 - PWA (Google Lighthouse)" + msgid "TEXT_REVIEW_PWA_VERY_GOOD" msgstr "* The website is all out progressive web app!\n" diff --git a/locales/en/LC_MESSAGES/seo_lighthouse.mo b/locales/en/LC_MESSAGES/seo_lighthouse.mo index 13a2d5b47e80c132c537855aa171cbe6e3c9966e..10554e806609fd0aa62730a534e3a3ebebb9410e 100644 GIT binary patch delta 282 zcmey({+y%!o)F7a1|Z-BVi_P#0dbJP8X#K?h}QzKI1nEJVo@Nz1H{rm{0WGafS8Yw zfk6UD8vZ!bCB-{ z--Zo#JTJRM`iWm*FTTS*{FM0xZQVNV$4xwpvPv#(PqCIH1N8}KW<0MVY;3|8zc8i>3zPwk2SMFuHc!Klg zF#}up8VmRyZ6j+qf`4!Tb2*Vw9L5q}M+cvwZS+m%XS5A{$06KATc6Lj8&PX)R||dV!$PI0?z(}$=qA1iV}?H&JAPg zfmT(&zML$ZMyyNQX=U;zJWt25U-JV$dEP1WJc-wIoT%Bz^%DR8Eg$^b^P)X^VWQJU z=UASboawYY&hpg!t?G{Zq|?PKx2MBI*CX5DsG8GWV|(Z<=vwWG34-=zE>L@?!G9pj hglZ`=6ore*xvzu3tf;D6Q}f0IaVh;>RO!#+*cM>tjU@m8 delta 1058 zcmajcKWGzC90%}U{#cS&N?T~rR_$x8O;U+TFtxRXLJMbNNx8H+NgX66@sggNcaXa% zf{>{WrGoI#p@I;uv5ie^brFRz5DpRrpzOE?Pa`ftQMdt*!e>y1FW@RXfGco0M3ja*j$fhJ za2!QvU=gB=HsJU*G#;TL7JdaM;2spu;qP{?7NU#hp}5i|H~_0qJjH!@4nBlpf!9#X zx8Kb-J=?=+Sx#rAV)k-6qh#bOd5IKhO(`l;Ug5jmG5*Cn$%lN5FZtNZaqn(dGPK$- z49%!9Tea+2mSi!OHqDx@vaD8Hx7W>vrLvq_ubTDSC8MGee;XJ{byg)@%N8mX)v~m* zrfc@SR^j=M!YS9fMQnxT>ZOXUISWQR>SkQ)a)pJ~DxKLW*X+C`i|kSUA<)D31IZ@E zY?chp{^$H6DdC!R__)ZMa!osYI;73C)8;xa9jC!)%5@U4c?@2_U`AQa^6$a9>4I7= x-!gT*b&hF_Tvc`5yvwqsGE+=bw^(GQQ4(*#OoLVHreUkbjVON_V*E)c{uhyg>2&}A diff --git a/locales/en/LC_MESSAGES/webperf-core.po b/locales/en/LC_MESSAGES/webperf-core.po index 257eb913..849babfb 100644 --- a/locales/en/LC_MESSAGES/webperf-core.po +++ b/locales/en/LC_MESSAGES/webperf-core.po @@ -49,52 +49,32 @@ msgstr "Exception, someone should look at this!" 
msgid "TEXT_TESTING_START_HEADER" msgstr "### {0} ###" -#: default.py:93 -msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE" -msgstr "###############################\nRunning test: 1 - Google Lighthouse Performance" - -#: default.py:96 -msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_A11Y" -msgstr "###############################\nRunning test: 10 - Google Lighthouse Accessibility" - -#: default.py:99 -msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_SEO" -msgstr "###############################\nRunning test: 4 - Google Lighthouse SEO" - -#: default.py:102 -msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_PWA" -msgstr "###############################\nRunning test: 8 - Google Lighthouse PWA" - -#: default.py:105 -msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE" -msgstr "###############################\nRunning test: 5 - Google Lighthouse Best Practice" - #: default.py:127 msgid "TEXT_TEST_VALID_ARGUMENTS" msgstr "Valid arguments for option -t/--test:" #: default.py:128 msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE" -msgstr "-t 1\t: Google Lighthouse Performance" +msgstr "-t 1\t: Performance (Google Lighthouse)" #: default.py:129 msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_SEO" -msgstr "-t 4\t: Google Lighthouse SEO" +msgstr "-t 4\t: SEO (Google Lighthouse)" #: default.py:130 msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_A11Y" -msgstr "-t 10\t: Google Lighthouse Accessibility" +msgstr "-t 10\t: Accessibility (Google Lighthouse)" #: default.py:131 msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_PWA" -msgstr "-t 8\t: Google Lighthouse PWA" +msgstr "-t 8\t: PWA (Google Lighthouse)" #: default.py:132 msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_BEST_PRACTICE" -msgstr "-t 5\t: Google Lighthouse Best Practice" +msgstr "-t 5\t: Best Practice (Google Lighthouse)" msgid "TEXT_TEST_VALID_ARGUMENTS_YELLOW_LAB_TOOLS" -msgstr "-t 17\t: Yellow Lab Tools (Quality on frontend)" +msgstr "-t 17\t: Quality on frontend (Yellow Lab Tools)" #: default.py:133 msgid "TEXT_TEST_VALID_ARGUMENTS_PAGE_NOT_FOUND" @@ -102,15 +82,15 @@ msgstr "-t 2\t: 404 (Page not Found)" #: default.py:134 msgid "TEXT_TEST_VALID_ARGUMENTS_HTML" -msgstr "-t 6\t: HTML" +msgstr "-t 6\t: HTML Validation" #: default.py:135 msgid "TEXT_TEST_VALID_ARGUMENTS_CSS" -msgstr "-t 7\t: CSS" +msgstr "-t 7\t: CSS Validation" #: default.py:136 msgid "TEXT_TEST_VALID_ARGUMENTS_WEBBKOLL" -msgstr "-t 20\t: Webbkoll" +msgstr "-t 20\t: Integrity & Security (Webbkoll)" #: default.py:137 msgid "TEXT_TEST_VALID_ARGUMENTS_STANDARD_FILES" diff --git a/locales/sv/LC_MESSAGES/a11y_lighthouse.mo b/locales/sv/LC_MESSAGES/a11y_lighthouse.mo index 07a55c01cbe4b0f0d4b49bf4826ebf61e240c7a1..586ea905b401ec3981d1b7c7124a77949b652993 100644 GIT binary patch delta 270 zcmX@czK5g!o)F7a1|Z-7Vi_Qg0dbJP93Wd9i01;a1Q2fpVlf~-2gD%tPk~q&h?y7} z7^Hx-8juzQ(vCpd4v2$+I0VR_#Kgd038bF`X)7SD$PCse#SjBzXafZrfV4G`UIV0q zf%J=soht-GTq8o_gF^lMJpJ6`LtKMHHXATbV+v8m54gM!Z!1zLNi8n1QZO`7&{YV@ m%*jbVyd*C@Co?@GwM0R~JwHD^Csn}*$SujpFD*{hWB>q39xsak delta 151 zcmdnPag4qGo)F7a1|VPuVi_O~0dbH(50EVm#JxZ)0mMszSPY2w0WnDZRUlRd;!i*< z1;k>E3=DQaYzV|4vkI6P7%YMGQ6Oyvr2hkHka{a-1_o^g1}=s;Aj29c&^2-9ip{}{ GQ<(tSNfD+1 diff --git a/locales/sv/LC_MESSAGES/a11y_lighthouse.po b/locales/sv/LC_MESSAGES/a11y_lighthouse.po index ba7a4ee8..ec59bcc7 100644 --- a/locales/sv/LC_MESSAGES/a11y_lighthouse.po +++ b/locales/sv/LC_MESSAGES/a11y_lighthouse.po @@ -14,6 +14,9 @@ msgstr "" "Content-Transfer-Encoding: 8bit\n" "Generated-By: pygettext.py 1.5\n" +msgid "TEXT_RUNNING_TEST" +msgstr "###############################\nKör test: 10 - Tillgänglighet (Google 
Lighthouse)" + msgid "TEXT_REVIEW_A11Y_VERY_GOOD" msgstr "* Webbplatsen har inga uppenbara fel inom tillgänglighet!\n" diff --git a/locales/sv/LC_MESSAGES/best_practice_lighthouse.mo b/locales/sv/LC_MESSAGES/best_practice_lighthouse.mo index 5e41fb273bacd0440c13a33214ca955527a866f5..0ee4e4e479f4f86a3abe45834b5b4af74a0f44d1 100644 GIT binary patch delta 287 zcmaFGev702o)F7a1|Z-BVi_P#0dbJP8X#L5h}Qx!Nd6=cO9AmSAXWfkMn(n(bs((- zq-BA$7m$_%(iuQn5JN48jxNG zq`83fRUoYmq#pojUm(rH!oV91}G~Dy^({oZ4d@|EBN;2|Gi&Hfj0GL!R A!2kdN delta 174 zcmcb`@ru3vo)F7a1|Z-7Vi_Qg0dbJP93Wd7i01+^NPZ^}O9AmUAXWh4k3g&r#6pY= z46;Dl3P{TV=?EZg2gKz-Yz^dZV`5-12hyK_v?h?&Vg~bN8A5;zHK0Hbkmdr?n}M`8 VklqKRCo3|FO+0jT^GwEKCIDw!6l(wg diff --git a/locales/sv/LC_MESSAGES/best_practice_lighthouse.po b/locales/sv/LC_MESSAGES/best_practice_lighthouse.po index f3e7b353..6cb88a0b 100644 --- a/locales/sv/LC_MESSAGES/best_practice_lighthouse.po +++ b/locales/sv/LC_MESSAGES/best_practice_lighthouse.po @@ -14,6 +14,9 @@ msgstr "" "Content-Transfer-Encoding: 8bit\n" "Generated-By: pygettext.py 1.5\n" +msgid "TEXT_RUNNING_TEST" +msgstr "###############################\nKör test: 5 - God praxis (Google Lighthouse)" + msgid "TEXT_REVIEW_PRACTICE_VERY_GOOD" msgstr "* Webbplatsen följer god praxis fullt ut!\n" diff --git a/locales/sv/LC_MESSAGES/frontend_quality_yellow_lab_tools.mo b/locales/sv/LC_MESSAGES/frontend_quality_yellow_lab_tools.mo index a11edf0248346023cfea1f8d08516b0698ac8881..1957f4ac615877848c13776ed6d30335206a3846 100644 GIT binary patch delta 118 zcmeC;>*Sm8B~_Dwfq{jUfk7Ea>j7ycARPpxMS*lFl6lJPk6W<}PQi~tP_4wwJ{ delta 120 zcmeC=>*Aa6B~^=ofq{jUfk7Ea>jP;eARP>(MS*k~lR=o)F7a1|Z-BVi_P#0dbJP8X#K?h}QzKI1nEJVo@Nz1H{rm{0WGafS8Yw zfk6UD8vOfkBtSo|%Ec3MkM5q-}xp zejv>Sr2hhGCm_wi!oUy;q`iUk#D^jiH!Kkhag7Lx4+{13^Yn9%4{;3+*&NJxjVVkS zKj88{ysb!~B(=E2O2I-wS0SJ%KfNfmxHz*+p*%GyDY2kHLBl;iKRqW^!6!34qa-80 Jv^Z6h0RXm#GmZcN delta 173 zcmZ3;xss#)o)F7a1|Z-7Vi_Qg0dbJP93Wc^i01;aI1q0EVo@MI1H{rm{0NAZfcPH} zO8~JVBLjmZkhTKSc0e2g#1=sQWF`iN03dw^NV@=OS!SRbAjOabWY_`)<^pLhAblH1 WI|1ouKziawk%Ly?IamI#KpMufx%h5Gq<`nku4xCV!84rZLf6t0XPaCslz zR-{mpT3lkKV4|R_5PW!Bc5Z%2eo=lwNoHqD1 F0{~eSH244j delta 173 zcmaFB@tnQ>o)F7a1|Z-7Vi_Qg0dbJP93Wc^i01;aI1q0EVo@MI1H{rm{0NAZfcPH} zO8~JVBLjmZkhTKSc0e2g#Fjw*WF`g%2OxbGNb3S=K4!4EEQ1Y@VG0x|1JYbTdJT}) V2h!Vt^u&)M6AvueT*f$=2>@ss6ypE@ diff --git a/locales/sv/LC_MESSAGES/seo_lighthouse.po b/locales/sv/LC_MESSAGES/seo_lighthouse.po index 4b9e9777..4c806d66 100644 --- a/locales/sv/LC_MESSAGES/seo_lighthouse.po +++ b/locales/sv/LC_MESSAGES/seo_lighthouse.po @@ -14,6 +14,9 @@ msgstr "" "Content-Transfer-Encoding: 8bit\n" "Generated-By: pygettext.py 1.5\n" +msgid "TEXT_RUNNING_TEST" +msgstr "###############################\nKör test: 4 - Sökmotoroptimering (Google Lighthouse)" + msgid "TEXT_REVIEW_SEO_VERY_GOOD" msgstr "* Webbplatsen följer god SEO-praxis fullt ut!\n" diff --git a/locales/sv/LC_MESSAGES/webperf-core.mo b/locales/sv/LC_MESSAGES/webperf-core.mo index 6af7c751cfc86b8ec8ff60f50c66f2b9579e6bfd..bfc74782e6b4c2f042352404e1ba281f9ef19d75 100644 GIT binary patch delta 864 zcmZ|L&1(}u7zXe+w$Y|(N-b)tsdW&QR9Tx^jUPxnR!yXeLGd7TLdMOI$6d$)oh-AK)`dK(S=W<~OtZ%)I-~YUWis`Y}1YM!3?r zr*VI`lYg$KF``M#SK&B(4JY9HzMr67w*~jZ9e5bh9->3w2(iZTR$9xN(hQFaLbZnSt3g%$}AF%|Bc=s&c%WaPk?Zf+c zf(SoK;V*;wl*YhrumF?rCbS=N5847x;6eBSo`7GW?cf(Y38zMh99V#7p@Me5m(ZTf zM|c2!gR}4_MCUY)VonN!YZ%yqjbwW}_F^M5v^V}x&N+_s{HAfjy7bvqui0pbppZyh zVcauZF>Va2Y2W9pv)K~CXu|os#PGTpu*8Va@Qs;~oml&Ma-#h%nU7eDD_IqR^jvZb zn@5S;fsJ`p&a#VIyGpQTh|eTUWa%9m*D45tRz}{ax{X`aIHiJDc@}HagJ0kvJVRFesi!{CfFT>Azmj;c@%( 
z*utF~o)K;!4Z1jmeb?(E=yr4;Us2ae^_sqjc+sAg&H0@->`|bx(5tfS1EG{&VI^K+ MWvx}%c2diK0Jzt_fdBvi delta 1101 zcmaLUPiWIn90%~%c7JT^WE(R7RNb>1*mW!Gbggyb4nl2RE1fi2J&Y}-F^kLkV)E>~ z6@+;xdJ0}d@iOtYA%k7SQ!nDpi-@S`!BZLN$&0Apv}HsMhBlw~^1i>ky!RgMyBDZ_ z?C-cwa2;y@e^?#c_1;BDKl&fw9{3gZ!e4EB-L2ZAun*6t;Q=@c1MoVe@D3b-PvIbZ z-Lgu)p)r65eV$ff2p&fNEF6UkFbeO&Q}6{Ghu_;C>mcL^`UMz*m9~#z5dD`h3>(mz zXP}di{V)QjT~$KPq0zuctai16H@t*&Bk&O7N7it&RsKWAHXagls_Tn4ZJE@GZ2C zVjEgJ^bxgN9TdJstEa;a=`K6^fu7Sg#(`6LmNSA=;XnHS7;chLOg4x0x( z)SU6q&0%*}=VqRUSHlEJ5FltyTZFpJvvo2zRU_8j5D2STv?WMr7T;e!=~o-o2%YZ<}=^vL;-uX#8kDp#*-MiDyb@4p;@s= e1;$i^hB6gVRk*~dR)0dT>8#A9a>+d7U-%7Ud<0nl diff --git a/locales/sv/LC_MESSAGES/webperf-core.po b/locales/sv/LC_MESSAGES/webperf-core.po index 27220383..3b102df1 100644 --- a/locales/sv/LC_MESSAGES/webperf-core.po +++ b/locales/sv/LC_MESSAGES/webperf-core.po @@ -49,52 +49,32 @@ msgstr "Fel, någon behöver ta en titt på detta." msgid "TEXT_TESTING_START_HEADER" msgstr "### {0} ###" -#: default.py:93 -msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE" -msgstr "###############################\nKör test: 0 - Google Lighthouse prestanda" - -#: default.py:96 -msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_A11Y" -msgstr "###############################\nKör test: 10 - Google Lighthouse tillgänglighet" - -#: default.py:99 -msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_SEO" -msgstr "###############################\nKör test: 4 - Google Lighthouse sökmotoroptimering" - -#: default.py:102 -msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_PWA" -msgstr "###############################\nKör test: 8 - Google Lighthouse progressiv webbapp" - -#: default.py:105 -msgid "TEXT_TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE" -msgstr "###############################\nKör test: 5 - Google Lighthouse god praxis" - #: default.py:127 msgid "TEXT_TEST_VALID_ARGUMENTS" msgstr "Giltiga argument att välja på -t/--test:" #: default.py:128 msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE" -msgstr "-t 1\t: Google Lighthouse prestanda" +msgstr "-t 1\t: Prestanda (Google Lighthouse)" #: default.py:129 msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_SEO" -msgstr "-t 4\t: Google Lighthouse sökmotoroptimering" +msgstr "-t 4\t: Sökmotoroptimering (Google Lighthouse)" #: default.py:130 msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_A11Y" -msgstr "-t 10\t: Google Lighthouse tillgänglighet" +msgstr "-t 10\t: Tillgänglighet (Google Lighthouse)" #: default.py:131 msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_PWA" -msgstr "-t 8\t: Google Lighthouse progressiv webbapp" +msgstr "-t 8\t: Progressiv webbapp (Google Lighthouse)" #: default.py:132 msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_BEST_PRACTICE" -msgstr "-t 5\t: Google Lighthouse god praxis" +msgstr "-t 5\t: God praxis (Google Lighthouse)" msgid "TEXT_TEST_VALID_ARGUMENTS_YELLOW_LAB_TOOLS" -msgstr "-t 17\t: Yellow Lab Tools (Kvalitet på frontend)" +msgstr "-t 17\t: Kvalitet på frontend (Yellow Lab Tools)" #: default.py:133 msgid "TEXT_TEST_VALID_ARGUMENTS_PAGE_NOT_FOUND" @@ -102,15 +82,15 @@ msgstr "-t 2\t: 404 (sida finns inte)" #: default.py:134 msgid "TEXT_TEST_VALID_ARGUMENTS_HTML" -msgstr "-t 6\t: HTML" +msgstr "-t 6\t: HTML validering" #: default.py:135 msgid "TEXT_TEST_VALID_ARGUMENTS_CSS" -msgstr "-t 7\t: CSS" +msgstr "-t 7\t: CSS validering" #: default.py:136 msgid "TEXT_TEST_VALID_ARGUMENTS_WEBBKOLL" -msgstr "-t 20\t: Webbkoll" +msgstr "-t 20\t: Integritet & Säkerhet (Webbkoll)" #: default.py:137 msgid "TEXT_TEST_VALID_ARGUMENTS_STANDARD_FILES" @@ -118,5 +98,5 @@ msgstr "-t 9\t: Standardfiler" #: default.py:137 default.py:141 
default.py:146 default.py:213
 msgid "TEXT_COMMAND_USAGE"
-msgstr "\n\tWebPerf Core\n\n\tAnvänd så här:\ndefault.py -u https://webperf.se\n\n\tVal och argument:\n\t-h/--help\t\t\t: Hjälp och hur du använder skriptet\n\t-u/--url \t\t: webbplatsens adress att testa\n\t-t/--test \t\t: kör ett specifikt test (ange ? för att lista tillgängliga tester)\n\t-r/--review\t\t\t: visar omdömen direkt i terminalen\n\t-i/--input \t\t: sökväg för input-fil (.json/.sqlite)\n\t-o/--output \t\t: sökväg till output-fil (.json/.csv/.sql/.sqlite)\n\t-A/--addUrl \t\t: webbplatsens adress/url (ett krav när du använder -i/--input)\n\t-D/--deleteUrl \t: webbplats adress/url (ett krav när du använder -i/--input)\n\t-L/--language \t: språk som används för output (en = default/sv)"
+msgstr "\n\tWebPerf Core\n\n\tAnvänd så här:\ndefault.py -u https://webperf.se\n\n\tVal och argument:\n\t-h/--help\t\t\t: Hjälp och hur du använder skriptet\n\t-u/--url \t\t: webbplatsens adress att testa\n\t-t/--test \t\t: kör ett specifikt test (ange ? för att lista tillgängliga tester)\n\t-r/--review\t\t\t: visar omdömen direkt i terminalen\n\t-i/--input \t\t: sökväg för input-fil (.json/.sqlite)\n\t-i/--input-skip \t\t: antal att hoppa över\n\t-i/--input-take \t\t: antal att testa\n\t-o/--output \t\t: sökväg till output-fil (.json/.csv/.sql/.sqlite)\n\t-A/--addUrl \t\t: webbplatsens adress/url (ett krav när du använder -i/--input)\n\t-D/--deleteUrl \t: webbplats adress/url (ett krav när du använder -i/--input)\n\t-L/--language \t: språk som används för output (en = default/sv)"

diff --git a/tests/a11y_lighthouse.py b/tests/a11y_lighthouse.py
index 6bce7113..92634572 100644
--- a/tests/a11y_lighthouse.py
+++ b/tests/a11y_lighthouse.py
@@ -24,6 +24,8 @@ def run_test(langCode, url, strategy='mobile', category='accessibility'):
     language.install()
     _ = language.gettext
 
+    print(_('TEXT_RUNNING_TEST'))
+
     check_url = url.strip()
 
     pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&key={2}'.format(
diff --git a/tests/best_practice_lighthouse.py b/tests/best_practice_lighthouse.py
index 6d7f46fa..44118082 100644
--- a/tests/best_practice_lighthouse.py
+++ b/tests/best_practice_lighthouse.py
@@ -23,6 +23,8 @@ def run_test(langCode, url, strategy='mobile', category='best-practices'):
     language.install()
     _ = language.gettext
 
+    print(_('TEXT_RUNNING_TEST'))
+
     check_url = url.strip()
 
     pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&strategy={2}&key={3}'.format(
diff --git a/tests/pwa_lighthouse.py b/tests/pwa_lighthouse.py
index acf6b3b4..bfc37ea9 100644
--- a/tests/pwa_lighthouse.py
+++ b/tests/pwa_lighthouse.py
@@ -23,6 +23,8 @@ def run_test(langCode, url, strategy='mobile', category='pwa'):
     language.install()
     _ = language.gettext
 
+    print(_('TEXT_RUNNING_TEST'))
+
     check_url = url.strip()
 
     pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&key={2}'.format(
diff --git a/tests/seo_lighthouse.py b/tests/seo_lighthouse.py
index 193e8e60..fbded5c8 100644
--- a/tests/seo_lighthouse.py
+++ b/tests/seo_lighthouse.py
@@ -24,6 +24,8 @@ def run_test(langCode, url, strategy='mobile', category='seo'):
     language.install()
     _ = language.gettext
 
+    print(_('TEXT_RUNNING_TEST'))
+
     check_url = url.strip()
 
     pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&key={2}'.format(
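All four hunks above follow the same pattern: each test now loads its strings from its own gettext domain and prints its own TEXT_RUNNING_TEST banner, which is what made the central print() calls in default.py removable. The setup they rely on is roughly this (a sketch assuming the locales/<langCode>/LC_MESSAGES/<domain>.mo layout this series ships; a11y_lighthouse is one of its domains):

    import gettext

    # Sketch of the per-test catalog setup used in the hunks above:
    # each test owns a .mo file and fetches its banner from it.
    language = gettext.translation(
        'a11y_lighthouse', localedir='locales', languages=['sv'])
    language.install()
    _ = language.gettext

    # The banner now comes from the test's own catalog rather than
    # from the shared webperf-core.po.
    print(_('TEXT_RUNNING_TEST'))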
From 47a6a5b9039db699aab8e33322bd442a6cbd0f08 Mon Sep 17 00:00:00 2001
From: Mattias
Date: Mon, 21 Dec 2020 17:40:55 +0100
Subject: [PATCH 49/49] added --input-take and --input-skip in help command

---
 locales/en/LC_MESSAGES/webperf-core.mo | Bin 2757 -> 2861 bytes
 locales/en/LC_MESSAGES/webperf-core.po |   2 +-
 locales/sv/LC_MESSAGES/webperf-core.mo | Bin 2973 -> 2971 bytes
 locales/sv/LC_MESSAGES/webperf-core.po |   2 +-
 4 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/locales/en/LC_MESSAGES/webperf-core.mo b/locales/en/LC_MESSAGES/webperf-core.mo
index 5d729acc62856dd59bfb4830847d1b7d0b9153fd..6daa918dcc1d2b2d0c1307b3c290658df207982b 100644
GIT binary patch
delta 294
zcmX>qx>jt$oq8r_28OAu3=HZF3=A_k7#M(v;Q*9A38a;Q{7XPu0!V)Y(%e9rpOb+>
zA4p3BX;mQY4WvbZbRm#d1Jdn4S`$dG1JW8m`Ua4e1JaCK5Ly99ivjueTtH)_fs8Pq
z0LXwMAPsUz2awhS(i?y@$b$P&aaQilU5sU{shRq^x|w+er6sz>*_j0jHhHDFNvTD4
roK^}DmO_4-LS{*7Zm~j1z5+-I7pE>lPf22SDgiwpC7b`UonQt4uKzf|

delta 192
zcmZ20c2so2o%&f!3=C6Q85qI+KMP1p0O>v;%?+fN18IFA
zy%|WW0_n#

diff --git a/locales/en/LC_MESSAGES/webperf-core.po b/locales/en/LC_MESSAGES/webperf-core.po
--- a/locales/en/LC_MESSAGES/webperf-core.po
+++ b/locales/en/LC_MESSAGES/webperf-core.po
 msgid "TEXT_COMMAND_USAGE"
-msgstr "\n\tWebPerf Core\n\n\tUsage:\ndefault.py -u https://webperf.se\n\n\tOptions and arguments:\n\t-h/--help\t\t\t: Help information on how to use script\n\t-u/--url \t\t: website url to test against\n\t-t/--test \t\t: run ONE test (use ? to list available tests)\n\t-r/--review\t\t\t: show reviews in terminal\n\t-i/--input \t\t: input file path (.json/.sqlite)\n\t-o/--output \t\t: output file path (.json/.csv/.sql/.sqlite)\n\t-A/--addUrl \t\t: website url (required in combination with -i/--input)\n\t-D/--deleteUrl \t: website url (required in combination with -i/--input)\n\t-L/--language \t: language used for output (en = default/sv)"
+msgstr "\n\tWebPerf Core\n\n\tUsage:\ndefault.py -u https://webperf.se\n\n\tOptions and arguments:\n\t-h/--help\t\t\t: Help information on how to use script\n\t-u/--url \t\t: website url to test against\n\t-t/--test \t\t: run ONE test (use ? to list available tests)\n\t-r/--review\t\t\t: show reviews in terminal\n\t-i/--input \t\t: input file path (.json/.sqlite)\n\t-i/--input-skip \t: number of items to skip\n\t-i/--input-take \t: number of items to take\n\t-o/--output \t\t: output file path (.json/.csv/.sql/.sqlite)\n\t-A/--addUrl \t\t: website url (required in combination with -i/--input)\n\t-D/--deleteUrl \t: website url (required in combination with -i/--input)\n\t-L/--language \t: language used for output (en = default/sv)"

diff --git a/locales/sv/LC_MESSAGES/webperf-core.mo b/locales/sv/LC_MESSAGES/webperf-core.mo
index bfc74782e6b4c2f042352404e1ba281f9ef19d75..33df8cb3a2b921b0b2e78a110bda49e8da0318af 100644
GIT binary patch
delta 195
zcmWm7tq#Ia42I#4jODg?LBDsvHr}v@PaNS7Q;b7t
rjB}je7X7^qG8%hG0LZu_-Ax~&y!

diff --git a/locales/sv/LC_MESSAGES/webperf-core.po b/locales/sv/LC_MESSAGES/webperf-core.po
index 3b102df1..5879334d 100644
--- a/locales/sv/LC_MESSAGES/webperf-core.po
+++ b/locales/sv/LC_MESSAGES/webperf-core.po
@@ -98,5 +98,5 @@ msgstr "-t 9\t: Standardfiler"
 
 #: default.py:137 default.py:141 default.py:146 default.py:213
 msgid "TEXT_COMMAND_USAGE"
-msgstr "\n\tWebPerf Core\n\n\tAnvänd så här:\ndefault.py -u https://webperf.se\n\n\tVal och argument:\n\t-h/--help\t\t\t: Hjälp och hur du använder skriptet\n\t-u/--url \t\t: webbplatsens adress att testa\n\t-t/--test \t\t: kör ett specifikt test (ange ? för att lista tillgängliga tester)\n\t-r/--review\t\t\t: visar omdömen direkt i terminalen\n\t-i/--input \t\t: sökväg för input-fil (.json/.sqlite)\n\t-i/--input-skip \t\t: antal att hoppa över\n\t-i/--input-take \t\t: antal att testa\n\t-o/--output \t\t: sökväg till output-fil (.json/.csv/.sql/.sqlite)\n\t-A/--addUrl \t\t: webbplatsens adress/url (ett krav när du använder -i/--input)\n\t-D/--deleteUrl \t: webbplats adress/url (ett krav när du använder -i/--input)\n\t-L/--language \t: språk som används för output (en = default/sv)"
+msgstr "\n\tWebPerf Core\n\n\tAnvänd så här:\ndefault.py -u https://webperf.se\n\n\tVal och argument:\n\t-h/--help\t\t\t: Hjälp och hur du använder skriptet\n\t-u/--url \t\t: webbplatsens adress att testa\n\t-t/--test \t\t: kör ett specifikt test (ange ? för att lista tillgängliga tester)\n\t-r/--review\t\t\t: visar omdömen direkt i terminalen\n\t-i/--input \t\t: sökväg för input-fil (.json/.sqlite)\n\t-i/--input-skip \t: antal att hoppa över\n\t-i/--input-take \t: antal att testa\n\t-o/--output \t\t: sökväg till output-fil (.json/.csv/.sql/.sqlite)\n\t-A/--addUrl \t\t: webbplatsens adress/url (ett krav när du använder -i/--input)\n\t-D/--deleteUrl \t: webbplats adress/url (ett krav när du använder -i/--input)\n\t-L/--language \t: språk som används för output (en = default/sv)"
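With the two new options in the help text, a windowed batch run looks like this (file names are placeholders):

    python default.py -i sites.json --input-skip 10 --input-take 5 -o results.json -L sv

i.e. skip the first 10 entries in sites.json, test the following 5, and write the results to results.json with Swedish output.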