diff --git a/SAMPLE-config.py b/SAMPLE-config.py
index 1001a3f5..00fa11e1 100644
--- a/SAMPLE-config.py
+++ b/SAMPLE-config.py
@@ -6,9 +6,10 @@
 #### NOTE: Rename this file to 'config.py' and fill in the missing info below
 
 # useragent for HTTP requests
+#useragent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:81.0) Gecko/20100101 Firefox/81.0'
 useragent = 'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)'
 
 # enter your API key for Google Pagespeed API
 googlePageSpeedApiKey = ""
 
-http_request_timeout = 5
+http_request_timeout = 60
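The config module above is plain Python, so its values are read as ordinary module attributes once the file has been renamed. A minimal sketch (assuming the rename to config.py has been done):

    import config

    print(config.useragent)             # sent with every HTTP request
    print(config.http_request_timeout)  # now 60 seconds instead of 5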
diff --git a/default.py b/default.py
index 88e01eca..384c1f1b 100644
--- a/default.py
+++ b/default.py
@@ -1,5 +1,6 @@
-#-*- coding: utf-8 -*-
-import sys, getopt
+# -*- coding: utf-8 -*-
+import sys
+import getopt
 import datetime
 from models import Sites, SiteTests
 import config
@@ -8,9 +9,11 @@
 TEST_ALL = -1
-(TEST_UNKNOWN_01, TEST_GOOGLE_LIGHTHOUSE, TEST_PAGE_NOT_FOUND, TEST_UNKNOWN_03, TEST_UNKNOWN_04, TEST_UNKNOWN_05, TEST_HTML, TEST_CSS, TEST_UNKNOWN_08, TEST_UNKNOWN_09, TEST_UNKNOWN_10, TEST_UNKNOWN_11, TEST_UNKNOWN_12, TEST_UNKNOWN_13, TEST_UNKNOWN_14, TEST_UNKNOWN_15, TEST_UNKNOWN_16, TEST_UNKNOWN_17, TEST_UNKNOWN_18, TEST_UNKNOWN_19, TEST_WEBBKOLL) = range(21)
+(TEST_UNKNOWN_01, TEST_GOOGLE_LIGHTHOUSE, TEST_PAGE_NOT_FOUND, TEST_UNKNOWN_03, TEST_GOOGLE_LIGHTHOUSE_SEO, TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE, TEST_HTML, TEST_CSS, TEST_GOOGLE_LIGHTHOUSE_PWA, TEST_STANDARD_FILES,
+ TEST_GOOGLE_LIGHTHOUSE_A11Y, TEST_UNKNOWN_11, TEST_UNKNOWN_12, TEST_UNKNOWN_13, TEST_UNKNOWN_14, TEST_UNKNOWN_15, TEST_UNKNOWN_16, TEST_YELLOW_LAB_TOOLS, TEST_UNKNOWN_18, TEST_UNKNOWN_19, TEST_WEBBKOLL) = range(21)
 
-def testsites(sites, test_type=None, show_reviews=False, only_test_untested_last_hours=24, order_by='title ASC'):
+
+def testsites(langCode, sites, test_type=None, show_reviews=False, only_test_untested_last_hours=24, order_by='title ASC'):
    """
    Executing the actual tests.
    Attributes:
@@ -34,18 +37,30 @@ def testsites(sites, test_type=None, show_reviews=False, only_test_untested_last
         the_test_result = None
         try:
-            if test_type == 2:
+            if test_type == TEST_PAGE_NOT_FOUND:
                 from tests.page_not_found import run_test
-            elif test_type == 6:
-                from tests.w3c_validate_html import run_test
-            elif test_type == 7:
-                from tests.w3c_validate_css import run_test
-            elif test_type == 20:
-                from tests.privacy_webbhollen import run_test
-            elif test_type == 1:
-                from tests.lighthouse import run_test
-
-            the_test_result = run_test(website)
+            elif test_type == TEST_HTML:
+                from tests.html_validator_w3c import run_test
+            elif test_type == TEST_CSS:
+                from tests.css_validator_w3c import run_test
+            elif test_type == TEST_WEBBKOLL:
+                from tests.privacy_webbkollen import run_test
+            elif test_type == TEST_GOOGLE_LIGHTHOUSE:
+                from tests.performance_lighthouse import run_test
+            elif test_type == TEST_GOOGLE_LIGHTHOUSE_SEO:
+                from tests.seo_lighthouse import run_test
+            elif test_type == TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE:
+                from tests.best_practice_lighthouse import run_test
+            elif test_type == TEST_GOOGLE_LIGHTHOUSE_PWA:
+                from tests.pwa_lighthouse import run_test
+            elif test_type == TEST_STANDARD_FILES:
+                from tests.standard_files import run_test
+            elif test_type == TEST_GOOGLE_LIGHTHOUSE_A11Y:
+                from tests.a11y_lighthouse import run_test
+            elif test_type == TEST_YELLOW_LAB_TOOLS:
+                from tests.frontend_quality_yellow_lab_tools import run_test
+
+            the_test_result = run_test(langCode, website)
 
             if the_test_result != None:
                 print(_('TEXT_SITE_RATING'), the_test_result[0])
@@ -59,55 +74,87 @@ def testsites(sites, test_type=None, show_reviews=False, only_test_untested_last
                 json_data = ''
                 pass
 
-            checkreport = str(the_test_result[1]).encode('utf-8')  # för att lösa encoding-probs
-            jsondata = str(json_data).encode('utf-8')  # --//--
+            checkreport = str(the_test_result[1]).encode(
+                'utf-8')  # to work around encoding problems
+            jsondata = str(json_data).encode('utf-8')  # --//--
 
-            site_test = SiteTests(site_id=site_id, type_of_test=test_type, check_report=checkreport, rating=the_test_result[0], test_date=datetime.datetime.now(), json_check_data=jsondata).todata()
+            site_test = SiteTests(site_id=site_id, type_of_test=test_type, check_report=checkreport,
+                                  rating=the_test_result[0], test_date=datetime.datetime.now(), json_check_data=jsondata).todata()
 
             result.append(site_test)
 
-            the_test_result = None  # 190506 för att inte skriva testresultat till sajter när testet kraschat. Måste det sättas till ''?
+            # 190506: avoid writing test results for sites where the test crashed. Should it be set to '' instead?
+            the_test_result = None
 
         except Exception as e:
             print(_('TEXT_EXCEPTION'), website, '\n', e)
             pass
 
         i += 1
-
+
     return result
 
-def testing(sites, test_type= TEST_ALL, show_reviews= False):
-    print(_('TEXT_TESTING_START_HEADER').format(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')))
+
+def testing(langCode, sites, test_type=TEST_ALL, show_reviews=False):
+    print(_('TEXT_TESTING_START_HEADER').format(
+        datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')))
 
     tests = list()
     ##############
     if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE):
-        print(_('TEXT_TEST_GOOGLE_PAGESPEED'))
-        tests.extend(testsites(sites, test_type=TEST_GOOGLE_LIGHTHOUSE, show_reviews=show_reviews))
+        tests.extend(testsites(
+            langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE, show_reviews=show_reviews))
     if (test_type == TEST_ALL or test_type == TEST_PAGE_NOT_FOUND):
-        print(_('TEXT_TEST_PAGE_NOT_FOUND'))
-        tests.extend(testsites(sites, test_type=TEST_PAGE_NOT_FOUND, show_reviews=show_reviews))
+        tests.extend(testsites(langCode, sites,
+                               test_type=TEST_PAGE_NOT_FOUND, show_reviews=show_reviews))
+    if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_SEO):
+        tests.extend(testsites(
+            langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE_SEO, show_reviews=show_reviews))
+    if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE):
+        tests.extend(testsites(
+            langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE, show_reviews=show_reviews))
    if (test_type == TEST_ALL or test_type == TEST_HTML):
-        print(_('TEXT_TEST_HTML'))
-        tests.extend(testsites(sites, test_type=TEST_HTML, show_reviews=show_reviews))
+        tests.extend(testsites(langCode, sites,
+                               test_type=TEST_HTML, show_reviews=show_reviews))
     if (test_type == TEST_ALL or test_type == TEST_CSS):
-        print(_('TEXT_TEST_CSS'))
-        tests.extend(testsites(sites, test_type=TEST_CSS, show_reviews=show_reviews))
+        tests.extend(testsites(langCode, sites,
+                               test_type=TEST_CSS, show_reviews=show_reviews))
+    if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_PWA):
+        tests.extend(testsites(
+            langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE_PWA, show_reviews=show_reviews))
+    if (test_type == TEST_ALL or test_type == TEST_STANDARD_FILES):
+        tests.extend(testsites(langCode, sites,
+                               test_type=TEST_STANDARD_FILES, show_reviews=show_reviews))
+    if (test_type == TEST_ALL or test_type == TEST_GOOGLE_LIGHTHOUSE_A11Y):
+        tests.extend(testsites(
+            langCode, sites, test_type=TEST_GOOGLE_LIGHTHOUSE_A11Y, show_reviews=show_reviews))
+    if (test_type == TEST_ALL or test_type == TEST_YELLOW_LAB_TOOLS):
+        tests.extend(testsites(
+            langCode, sites, test_type=TEST_YELLOW_LAB_TOOLS, show_reviews=show_reviews))
     if (test_type == TEST_ALL or test_type == TEST_WEBBKOLL):
-        print(_('TEXT_TEST_WEBBKOLL'))
-        tests.extend(testsites(sites, test_type=TEST_WEBBKOLL, show_reviews=show_reviews))
+        tests.extend(testsites(langCode, sites,
+                               test_type=TEST_WEBBKOLL, show_reviews=show_reviews))
+
     return tests
 
+
 def validate_test_type(test_type):
-    if test_type != TEST_HTML and test_type != TEST_PAGE_NOT_FOUND and test_type != TEST_CSS and test_type != TEST_WEBBKOLL and test_type != TEST_GOOGLE_LIGHTHOUSE:
+    if test_type not in (TEST_HTML, TEST_PAGE_NOT_FOUND, TEST_CSS, TEST_WEBBKOLL, TEST_GOOGLE_LIGHTHOUSE,
+                         TEST_GOOGLE_LIGHTHOUSE_PWA, TEST_GOOGLE_LIGHTHOUSE_A11Y, TEST_GOOGLE_LIGHTHOUSE_SEO,
+                         TEST_GOOGLE_LIGHTHOUSE_BEST_PRACTICE, TEST_STANDARD_FILES, TEST_YELLOW_LAB_TOOLS):
         print(_('TEXT_TEST_VALID_ARGUMENTS'))
-        print(_('TEXT_TEST_VALID_ARGUMENTS_GOOGLE_PAGESPEED'))
+        print(_('TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE'))
         print(_('TEXT_TEST_VALID_ARGUMENTS_PAGE_NOT_FOUND'))
+        print(_('TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_SEO'))
+        print(_('TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_BEST_PRACTICE'))
         print(_('TEXT_TEST_VALID_ARGUMENTS_HTML'))
         print(_('TEXT_TEST_VALID_ARGUMENTS_CSS'))
+        print(_('TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_PWA'))
+        print(_('TEXT_TEST_VALID_ARGUMENTS_STANDARD_FILES'))
+        print(_('TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_A11Y'))
+        print(_('TEXT_TEST_VALID_ARGUMENTS_YELLOW_LAB_TOOLS'))
         print(_('TEXT_TEST_VALID_ARGUMENTS_WEBBKOLL'))
         return -2
     else:
         return test_type
 
+
 def main(argv):
     """
     WebPerf Core
@@ -118,7 +165,7 @@ def main(argv):
     Options and arguments:
     -h/--help\t\t\t: Help information on how to use script
     -u/--url \t\t: website url to test against
-    -t/--test <1/2/6/7/20>\t: runs ONE specific test against website(s)
+    -t/--test <1/2/4/5/6/7/8/9/10/17/20>\t: runs ONE specific test against website(s)
     -r/--review\t\t\t: show reviews in terminal
     -i/--input \t: input file path (.json/.sqlite)
     -o/--output \t: output file path (.json/.csv/.sql/.sqlite)
@@ -131,15 +178,25 @@ def main(argv):
     sites = list()
     output_filename = ''
     input_filename = ''
+    input_skip = 0
+    input_take = -1
     show_reviews = False
     show_help = False
     add_url = ''
     delete_url = ''
     langCode = 'en'
+    language = False
     global _
 
+    # add support for default (en) language
+    language = gettext.translation(
+        'webperf-core', localedir='locales', languages=[langCode])
+    language.install()
+    _ = language.gettext
+
     try:
-        opts, args = getopt.getopt(argv,"hu:t:i:o:rA:D:L:",["help","url","test", "input", "output", "review", "report", "addUrl", "deleteUrl", "language"])
+        opts, args = getopt.getopt(argv, "hu:t:i:o:rA:D:L:", [
+                                   "help", "url=", "test=", "input=", "output=", "review", "report", "addUrl=", "deleteUrl=", "language=", "input-skip=", "input-take="])
     except getopt.GetoptError:
         print(main.__doc__)
         sys.exit(2)
@@ -148,17 +205,17 @@ def main(argv):
         show_help = True
 
     for opt, arg in opts:
-        if opt in ('-h', '--help'): # help
+        if opt in ('-h', '--help'):  # help
             show_help = True
-        elif opt in ("-u", "--url"): # site url
+        elif opt in ("-u", "--url"):  # site url
             sites.append([0, arg])
-        elif opt in ("-A", "--addUrl"): # site url
+        elif opt in ("-A", "--addUrl"):  # site url
             add_url = arg
-        elif opt in ("-D", "--deleteUrl"): # site url
+        elif opt in ("-D", "--deleteUrl"):  # site url
             delete_url = arg
-        elif opt in ("-L", "--language"): # language code
+        elif opt in ("-L", "--language"):  # language code
             # loop all available languages and verify language exist
-            import os
+            import os
             availableLanguages = list()
             localeDirs = os.listdir('locales')
             foundLang = False
 
@@ -167,7 +224,8 @@ def main(argv):
                 if (localeName[0:1] == '.'):
                     continue
 
-                languageSubDirectory = os.path.join('locales', localeName, "LC_MESSAGES")
+                languageSubDirectory = os.path.join(
+                    'locales', localeName, "LC_MESSAGES")
 
                 if (os.path.exists(languageSubDirectory)):
                     availableLanguages.append(localeName)
 
@@ -176,11 +234,17 @@ def main(argv):
                     langCode = arg
                     foundLang = True
 
+                    language = gettext.translation(
+                        'webperf-core', localedir='locales', languages=[langCode])
+                    language.install()
+                    _ = language.gettext
+
             if (not foundLang):
                 # Not translateable
-                print('Language not found, only the following languages are available:', availableLanguages)
+                print(
+                    'Language not found, only the following languages are available:', availableLanguages)
                 sys.exit(2)
-        elif opt in ("-t", "--test"): # test type
+        elif opt in ("-t", "--test"):  # test type
             try:
                 tmp_test_type = int(arg)
                 test_type = validate_test_type(tmp_test_type)
@@ -189,34 +253,53 @@ def main(argv):
             except Exception:
                 validate_test_type(arg)
                 sys.exit(2)
-        elif opt in ("-i", "--input"): # input file path
+        elif opt in ("-i", "--input"):  # input file path
             input_filename = arg
+
+            file_ending = ""
             file_long_ending = ""
+            if (len(input_filename) > 4):
+                file_ending = input_filename[-4:].lower()
             if (len(input_filename) > 7):
                 file_long_ending = input_filename[-7:].lower()
-            if file_long_ending == ".sqlite":
+            if file_long_ending == ".sqlite":
                 from engines.sqlite import read_sites, add_site, delete_site
+            elif (file_ending == ".csv"):
+                from engines.csv import read_sites, add_site, delete_site
+            elif (file_ending == ".xml"):  # e.g. https://example.com/sitemap.xml
+                from engines.sitemap import read_sites, add_site, delete_site
             else:
                 from engines.json import read_sites, add_site, delete_site
-            sites = read_sites(input_filename)
             pass
-        elif opt in ("-o", "--output"): # output file path
+        elif opt in ("--input-skip",):  # specifies number of items to skip at the beginning
+            try:
+                input_skip = int(arg)
+            except Exception:
+                print(_('TEXT_COMMAND_USAGE'))
+                sys.exit(2)
+            pass
+        elif opt in ("--input-take",):  # specifies number of items to take
+            try:
+                input_take = int(arg)
+            except Exception:
+                print(_('TEXT_COMMAND_USAGE'))
+                sys.exit(2)
+            pass
+        elif opt in ("-o", "--output"):  # output file path
             output_filename = arg
             pass
-        elif opt in ("-r", "--review", "--report"): # writes reviews directly in terminal
+        elif opt in ("-r", "--review", "--report"):  # writes reviews directly in terminal
             show_reviews = True
             pass
 
-    # add support for language
-    language = gettext.translation('webperf-core', localedir='locales', languages=[langCode])
-    language.install()
-    _ = language.gettext
-
     if (show_help):
         print(_('TEXT_COMMAND_USAGE'))
         sys.exit(2)
 
+    if (input_filename != ''):
+        sites = read_sites(input_filename, input_skip, input_take)
+
     if (add_url != ''):
         # check if website url should be added
         sites = add_site(input_filename, add_url)
@@ -225,7 +308,8 @@ def main(argv):
         sites = delete_site(input_filename, delete_url)
     elif (len(sites)):
         # run test(s) for every website
-        siteTests = testing(sites, test_type=test_type, show_reviews=show_reviews)
+        siteTests = testing(
+            langCode, sites, test_type=test_type, show_reviews=show_reviews)
         if (len(output_filename) > 0):
             file_ending = ""
             file_long_ending = ""
@@ -252,4 +336,4 @@
 If file is executed on itself then call a definition, mostly for testing purposes
 """
 if __name__ == '__main__':
-    main(sys.argv[1:])
\ No newline at end of file
+    main(sys.argv[1:])
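The new skip/take options compose with any input engine. A minimal sketch of driving the CLI entry point from Python (sites.json is a hypothetical path, and a filled-in config.py is assumed):

    from default import main

    # run only test 1 (performance) against items 10..14 of the input file
    main(['-i', 'sites.json', '--input-skip', '10', '--input-take', '5', '-t', '1'])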
diff --git a/engines/csv.py b/engines/csv.py
index a535cbce..452e9fca 100644
--- a/engines/csv.py
+++ b/engines/csv.py
@@ -1,10 +1,82 @@
-#-*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
 from models import Sites, SiteTests
+from engines.utils import use_website
 import csv
 
+
 def write_tests(output_filename, siteTests):
     with open(output_filename, 'w', newline='') as csvfile:
         writer = csv.DictWriter(csvfile, fieldnames=SiteTests.fieldnames())
 
         writer.writeheader()
-        writer.writerows(siteTests)
\ No newline at end of file
+        writer.writerows(siteTests)
+
+
+def add_site(input_filename, url):
+    sites = read_sites(input_filename, 0, -1)
+    # print(sites)
+    id = len(sites)
+    sites.append([id, url])
+    write_sites(input_filename, sites)
+
+    print(_('TEXT_WEBSITE_URL_ADDED').format(url))
+
+    return sites
+
+
+def delete_site(input_filename, url):
+    sites = read_sites(input_filename, 0, -1)
+    tmpSites = list()
+    for site in sites:
+        site_id = site[0]
+        site_url = site[1]
+        if (url != site_url):
+            tmpSites.append([site_id, site_url])
+
+    write_sites(input_filename, tmpSites)
+
+    print(_('TEXT_WEBSITE_URL_DELETED').format(url))
+
+    return tmpSites
+
+
+def read_sites(input_filename, input_skip, input_take):
+    sites = list()
+
+    with open(input_filename, newline='') as csvfile:
+        # sniff the dialect once, then reuse it for the actual reader
+        dialect = csv.Sniffer().sniff(csvfile.read(1024))
+        csvfile.seek(0)
+
+        csv_reader = csv.reader(csvfile, dialect)
+        current_index = 0
+        for row in csv_reader:
+            number_of_fields = len(Sites.fieldnames())
+            current_number_of_fields = len(row)
+            if number_of_fields == current_number_of_fields:
+                # ignore first row as that is our header info
+                if current_index != 0 and use_website(current_index + 1, input_skip, input_take):
+                    sites.append([row[0], row[1]])
+            elif current_number_of_fields == 1:
+                # we have no header and only one column, use the column as website url
+                if use_website(current_index, input_skip, input_take):
+                    sites.append([current_index, "".join(row)])
+            current_index += 1
+
+    return sites
+
+
+def write_sites(output_filename, sites):
+    sites_output = list()
+    for site in sites:
+        site_id = site[0]
+        site_url = site[1]
+        site_object = Sites(id=site_id, website=site_url).todata()
+        sites_output.append(site_object)
+
+    with open(output_filename, 'w', newline='') as csvfile:
+        writer = csv.DictWriter(csvfile, fieldnames=Sites.fieldnames())
+
+        writer.writeheader()
+        writer.writerows(sites_output)
diff --git a/engines/json.py b/engines/json.py
index d06bde4f..71430fa6 100644
--- a/engines/json.py
+++ b/engines/json.py
@@ -1,40 +1,51 @@
-#-*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
+from engines.utils import use_website
 import json
 
+
 def add_site(input_filename, url):
-    sites = read_sites(input_filename)
-    print(sites)
+    sites = read_sites(input_filename, 0, -1)
+    # print(sites)
     id = len(sites)
     sites.append([id, url])
     write_sites(input_filename, sites)
 
-    print("website with url: " + url + " has been added\n")
+    print(_('TEXT_WEBSITE_URL_ADDED').format(url))
 
     return sites
 
+
 def delete_site(input_filename, url):
-    sites = read_sites(input_filename)
+    sites = read_sites(input_filename, 0, -1)
     tmpSites = list()
     for site in sites:
         site_id = site[0]
         site_url = site[1]
         if (url != site_url):
             tmpSites.append([site_id, site_url])
 
     write_sites(input_filename, tmpSites)
 
-    print("website with url: " + url + " has been deleted\n")
+    print(_('TEXT_WEBSITE_URL_DELETED').format(url))
 
     return tmpSites
 
-def read_sites(input_filename):
+
+def read_sites(input_filename, input_skip, input_take):
     sites = list()
 
     with open(input_filename) as json_input_file:
         data = json.load(json_input_file)
 
+    current_index = 0
     for site in data["sites"]:
-        sites.append([site["id"], site["url"]])
+        if use_website(current_index, input_skip, input_take):
+            sites.append([site["id"], site["url"]])
+        current_index += 1
 
     return sites
 
+
 def write_tests(output_filename, siteTests):
     with open(output_filename, 'w') as outfile:
         # json require us to have an object as root element
@@ -43,18 +54,20 @@
         }
         json.dump(testsContainerObject, outfile)
 
+
 def write_sites(output_filename, sites):
     with open(output_filename, 'w') as outfile:
         # json require us to have an object as root element
         jsonSites = list()
+        current_siteid = 0
         for site in sites:
             jsonSites.append({
-                'id': site[0],
-                'url': site[1]
-            })
+                'id': site[0],
+                'url': site[1]
+            })
+            current_siteid += 1
 
         sitesContainerObject = {
             "sites": jsonSites
         }
         json.dump(sitesContainerObject, outfile)
-
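For reference, read_sites above expects the same container object that write_sites emits. A minimal sketch, assuming a hypothetical sites.json in the working directory:

    # sites.json: {"sites": [{"id": 0, "url": "https://webperf.se"}]}
    from engines.json import read_sites

    sites = read_sites('sites.json', 0, -1)  # skip nothing, take everything
    print(sites)                             # [[0, 'https://webperf.se']]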
diff --git a/engines/sitemap.py b/engines/sitemap.py
new file mode 100644
index 00000000..39373ead
--- /dev/null
+++ b/engines/sitemap.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+from models import Sites, SiteTests
+from engines.utils import use_website
+import config
+from tests.utils import *
+import re
+
+
+def read_sites(input_sitemap_url, input_skip, input_take):
+    sites = list()
+
+    sitemap_content = httpRequestGetContent(input_sitemap_url)
+
+    regex = r"<loc>(?P<itemurl>[^<]+)<"
+    matches = re.finditer(regex, sitemap_content, re.MULTILINE)
+
+    current_index = 0
+    for matchNum, match in enumerate(matches, start=1):
+
+        item_url = match.group('itemurl')
+
+        if use_website(current_index, input_skip, input_take):
+            sites.append([current_index, item_url])
+        current_index += 1
+    return sites
+
+
+def add_site(input_filename, url):
+    print("WARNING: the sitemap engine is a read-only method for testing all pages in a sitemap.xml, NO changes will be made")
+
+    sites = read_sites(input_filename, 0, -1)
+
+    return sites
+
+
+def delete_site(input_filename, url):
+    print("WARNING: the sitemap engine is a read-only method for testing all pages in a sitemap.xml, NO changes will be made")
+
+    sites = read_sites(input_filename, 0, -1)
+
+    return sites
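The <loc> pattern above (reconstructed here; the named group follows from the match.group('itemurl') call) can be exercised without a network call. A minimal sketch:

    import re

    sitemap_content = """<urlset>
      <url><loc>https://webperf.se/</loc></url>
      <url><loc>https://webperf.se/articles/</loc></url>
    </urlset>"""

    regex = r"<loc>(?P<itemurl>[^<]+)<"
    for match in re.finditer(regex, sitemap_content, re.MULTILINE):
        print(match.group('itemurl'))  # prints both page urls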
diff --git a/engines/sql.py b/engines/sql.py
index 4df0131f..33275702 100644
--- a/engines/sql.py
+++ b/engines/sql.py
@@ -1,10 +1,16 @@
-#-*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
+from engines.utils import use_website
 
-def write_tests(output_filename, siteTests):
+
+def write_tests(output_filename, siteTests, input_skip, input_take):
     with open(output_filename, 'w') as outfile:
+        current_index = 0
         for test in siteTests:
-            format_str = """INSERT INTO sitetests (site_id, test_date, type_of_test, check_report, json_check_data, most_recent, rating)
-            VALUES ("{siteid}", "{testdate}", "{testtype}", "{report}", "{json}", "{recent}", "{rating}");\n"""
-            sql_command = format_str.format(siteid=test["site_id"], testdate=test["date"], testtype=test["type_of_test"], report=test["report"], json=test["data"], recent=1, rating=test["rating"])
-
-            outfile.write(sql_command)
\ No newline at end of file
+            if use_website(current_index, input_skip, input_take):
+                format_str = """INSERT INTO sitetests (site_id, test_date, type_of_test, check_report, json_check_data, most_recent, rating)
+                VALUES ("{siteid}", "{testdate}", "{testtype}", "{report}", "{json}", "{recent}", "{rating}");\n"""
+                sql_command = format_str.format(siteid=test["site_id"], testdate=test["date"], testtype=test["type_of_test"],
+                                                report=test["report"], json=test["data"], recent=1, rating=test["rating"])
+                outfile.write(sql_command)
+
+            current_index += 1
diff --git a/engines/sqlite.py b/engines/sqlite.py
index c60ddff5..1a7f3e8c 100644
--- a/engines/sqlite.py
+++ b/engines/sqlite.py
@@ -1,6 +1,8 @@
-#-*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
+from engines.utils import use_website
 import sqlite3
 
+
 def db_tables(output_filename):
     conn = sqlite3.connect(output_filename)
     c = conn.cursor()
@@ -12,6 +14,7 @@
 
     conn.close()
 
+
 def add_site(input_filename, url):
     conn = sqlite3.connect(input_filename)
     c = conn.cursor()
@@ -24,10 +27,11 @@
 
     conn.close()
 
-    print("website with url: " + url + " has been added\n")
+    print(_('TEXT_WEBSITE_URL_ADDED').format(url))
 
-    return read_sites(input_filename)
+    return read_sites(input_filename, 0, -1)
 
+
 def delete_site(input_filename, url):
     conn = sqlite3.connect(input_filename)
     c = conn.cursor()
@@ -39,22 +43,27 @@
 
     conn.close()
 
-    print("website with url: " + url + " has been deleted\n")
+    print(_('TEXT_WEBSITE_URL_DELETED').format(url))
 
-    return read_sites(input_filename)
+    return read_sites(input_filename, 0, -1)
 
-def read_sites(input_filename):
+
+def read_sites(input_filename, input_skip, input_take):
     sites = list()
-    order_by='title ASC'
+    order_by = 'title ASC'
 
     conn = sqlite3.connect(input_filename)
     c = conn.cursor()
 
+    current_index = 0
     for row in c.execute('SELECT id, website FROM sites WHERE active=1 ORDER BY {0}'.format(order_by)):
-        sites.append([row[0], row[1]])
+        if use_website(current_index, input_skip, input_take):
+            sites.append([row[0], row[1]])
+        current_index += 1
     conn.close()
 
     return sites
 
+
 def write_tests(output_filename, siteTests):
     conn = sqlite3.connect(output_filename)
     c = conn.cursor()
@@ -62,7 +71,8 @@
     for test in siteTests:
         # set previous testresult as not latest
         format_str = """UPDATE sitetests SET most_recent=0 WHERE site_id="{siteid}" AND type_of_test="{testtype}" AND most_recent=1;\n"""
-        sql_command = format_str.format(siteid=test["site_id"], testtype=test["type_of_test"])
+        sql_command = format_str.format(
+            siteid=test["site_id"], testtype=test["type_of_test"])
 
         c.execute(sql_command)
         conn.commit()
@@ -70,9 +80,10 @@
         # update testresult for all sites
         format_str = """INSERT INTO sitetests (site_id, test_date, type_of_test, check_report, json_check_data, most_recent, rating)
         VALUES ("{siteid}", "{testdate}", "{testtype}", "{report}", "{json}", "{recent}", "{rating}");\n"""
-        sql_command = format_str.format(siteid=test["site_id"], testdate=test["date"], testtype=test["type_of_test"], report=test["report"], json=test["data"], recent=1, rating=test["rating"])
+        sql_command = format_str.format(siteid=test["site_id"], testdate=test["date"], testtype=test["type_of_test"],
+                                        report=test["report"], json=test["data"], recent=1, rating=test["rating"])
 
         c.execute(sql_command)
         conn.commit()
 
-    conn.close()
\ No newline at end of file
+    conn.close()
diff --git a/engines/utils.py b/engines/utils.py
new file mode 100644
index 00000000..4edc9cb5
--- /dev/null
+++ b/engines/utils.py
@@ -0,0 +1,11 @@
+# -*- coding: utf-8 -*-
+
+
+def use_website(current_index, skip, take):
+    if skip > 0 and current_index < skip:
+        return False
+
+    if take != -1 and current_index >= (skip + take):
+        return False
+
+    return True
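use_website implements a simple skip/take window over an enumerated sequence, with take=-1 meaning "no upper bound". A minimal sketch of its contract:

    from engines.utils import use_website

    # with skip=2, take=3 only indexes 2, 3 and 4 pass the window
    print([i for i in range(10) if use_website(i, 2, 3)])   # [2, 3, 4]
    # take=-1: everything after the skip passes
    print([i for i in range(10) if use_website(i, 2, -1)])  # [2, 3, ..., 9]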
diff --git a/locales/en/LC_MESSAGES/a11y_lighthouse.mo b/locales/en/LC_MESSAGES/a11y_lighthouse.mo
new file mode 100644
index 00000000..ab278885
Binary files /dev/null and b/locales/en/LC_MESSAGES/a11y_lighthouse.mo differ
diff --git a/locales/en/LC_MESSAGES/a11y_lighthouse.po b/locales/en/LC_MESSAGES/a11y_lighthouse.po
new file mode 100644
index 00000000..deeac26c
--- /dev/null
+++ b/locales/en/LC_MESSAGES/a11y_lighthouse.po
@@ -0,0 +1,36 @@
+# English
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 10:45+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: Marcus \n"
+"Language-Team: English \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nRunning test: 10 - Accessibility (Google Lighthouse)"
+
+msgid "TEXT_REVIEW_A11Y_VERY_GOOD"
+msgstr "* The website does not have any apparent accessibility issues!\n"
+
+msgid "TEXT_REVIEW_A11Y_IS_GOOD"
+msgstr "* The website could be more accessible, but is rather good!\n"
+
+msgid "TEXT_REVIEW_A11Y_IS_OK"
+msgstr "* The accessibility is average, but needs to get better.\n"
+
+msgid "TEXT_REVIEW_A11Y_IS_BAD"
+msgstr "* The website is quite bad at accessibility and hard to use for people with disabilities!\n"
+
+msgid "TEXT_REVIEW_A11Y_IS_VERY_BAD"
+msgstr "* The accessibility is apparently really bad!\n"
+
+msgid "TEXT_REVIEW_A11Y_NUMBER_OF_PROBLEMS"
+msgstr "* Number of accessibility problems: {}\n"
\ No newline at end of file
diff --git a/locales/en/LC_MESSAGES/best_practice_lighthouse.mo b/locales/en/LC_MESSAGES/best_practice_lighthouse.mo
new file mode 100644
index 00000000..60d1e95d
Binary files /dev/null and b/locales/en/LC_MESSAGES/best_practice_lighthouse.mo differ
diff --git a/locales/en/LC_MESSAGES/best_practice_lighthouse.po b/locales/en/LC_MESSAGES/best_practice_lighthouse.po
new file mode 100644
index 00000000..a61ca740
--- /dev/null
+++ b/locales/en/LC_MESSAGES/best_practice_lighthouse.po
@@ -0,0 +1,39 @@
+# English
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 10:45+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: Marcus \n"
+"Language-Team: English \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nRunning test: 5 - Best Practice (Google Lighthouse)"
+
+msgid "TEXT_REVIEW_PRACTICE_VERY_GOOD"
+msgstr "* The website follows good practice according to Google Lighthouse!\n"
+
+msgid "TEXT_REVIEW_PRACTICE_IS_GOOD"
+msgstr "* The website can still improve somewhat on best practice.\n"
+
+msgid "TEXT_REVIEW_PRACTICE_IS_OK"
+msgstr "* About average at following good practice.\n"
+
+msgid "TEXT_REVIEW_PRACTICE_IS_BAD"
+msgstr "* The website is pretty bad at what is considered good practice.\n"
+
+msgid "TEXT_REVIEW_PRACTICE_IS_VERY_BAD"
+msgstr "* The website is really bad at adhering to good practice on the web!\n"
+
+msgid "TEXT_REVIEW_PRACTICE_NUMBER_OF_PROBLEMS"
+msgstr "* Number of problem(s) regarding good practice: {}\n"
+
+msgid "TEXT_REVIEW_PRACTICE_PROBLEMS"
+msgstr "\nProblem(s):\n"
\ No newline at end of file
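Each test now ships its own gettext domain named after its .po file. The diff does not show how the test modules load it, but presumably it mirrors the webperf-core domain setup in default.py; a sketch under that assumption (run from the repository root so locales/ resolves):

    import gettext

    # assumption: each test loads the domain matching its .po filename
    language = gettext.translation(
        'a11y_lighthouse', localedir='locales', languages=['en'])
    language.install()
    _ = language.gettext

    print(_('TEXT_RUNNING_TEST'))  # the banner defined above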
diff --git a/locales/en/LC_MESSAGES/css_validator_w3c.mo b/locales/en/LC_MESSAGES/css_validator_w3c.mo
new file mode 100644
index 00000000..a32ec522
Binary files /dev/null and b/locales/en/LC_MESSAGES/css_validator_w3c.mo differ
diff --git a/locales/en/LC_MESSAGES/css_validator_w3c.po b/locales/en/LC_MESSAGES/css_validator_w3c.po
new file mode 100644
index 00000000..bc12d1e2
--- /dev/null
+++ b/locales/en/LC_MESSAGES/css_validator_w3c.po
@@ -0,0 +1,34 @@
+# English (default).
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 10:45+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: mattias \n"
+"Language-Team: English \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nRunning test: 7 - CSS\n###############################"
+
+msgid "TEXT_REVIEW_CSS_VERY_GOOD"
+msgstr "* No errors in the CSS code.\n"
+
+msgid "TEXT_REVIEW_CSS_IS_GOOD"
+msgstr "* The tested page has {0} errors in its CSS code.\n"
+
+msgid "TEXT_REVIEW_CSS_IS_OK"
+msgstr "* The tested page has {0} errors in its CSS code.\n"
+
+msgid "TEXT_REVIEW_CSS_IS_BAD"
+msgstr "* The tested page has {0} errors in its CSS code. That is not so good.\n"
+
+msgid "TEXT_REVIEW_CSS_IS_VERY_BAD"
+msgstr "* The tested page has lots of errors in its CSS code. A total of {0}.\n"
diff --git a/locales/en/LC_MESSAGES/frontend_quality_yellow_lab_tools.mo b/locales/en/LC_MESSAGES/frontend_quality_yellow_lab_tools.mo
new file mode 100644
index 00000000..424cb27b
Binary files /dev/null and b/locales/en/LC_MESSAGES/frontend_quality_yellow_lab_tools.mo differ
diff --git a/locales/en/LC_MESSAGES/frontend_quality_yellow_lab_tools.po b/locales/en/LC_MESSAGES/frontend_quality_yellow_lab_tools.po
new file mode 100644
index 00000000..c74fbfe9
--- /dev/null
+++ b/locales/en/LC_MESSAGES/frontend_quality_yellow_lab_tools.po
@@ -0,0 +1,73 @@
+# English (default).
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-11 16:14+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: mattias \n"
+"Language-Team: English \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nRunning test: 17 - Quality on frontend (Yellow Lab Tools)\n###############################"
+
+msgid "TEXT_WEBSITE_IS_VERY_GOOD"
+msgstr "* The website is well built!\n"
+
+msgid "TEXT_WEBSITE_IS_GOOD"
+msgstr "* The website is good.\n"
+
+msgid "TEXT_WEBSITE_IS_OK"
+msgstr "* The website is neither good nor bad.\n"
+
+msgid "TEXT_WEBSITE_IS_BAD"
+msgstr "* The website is quite slow or has bad front-end code.\n"
+
+msgid "TEXT_WEBSITE_IS_VERY_BAD"
+msgstr "* Very bad rating according to Yellow Lab Tools.\n"
+
+msgid "TEXT_OVERALL_GRADE"
+msgstr "* Overall rating: {} out of 100\n"
+
+msgid "TEXT_TESTED_ON_DEVICETYPE"
+msgstr "* Tested for device type: {}\n"
+
+msgid "TEXT_PAGE_WEIGHT"
+msgstr "* Page weight: {}\n"
+
+msgid "TEXT_PAGE_REQUESTS"
+msgstr "* Number of page requests: {}\n"
+
+msgid "TEXT_PAGE_DOM_COMPLEXITY"
+msgstr "* Page DOM complexity: {}\n"
+
+msgid "TEXT_PAGE_DOM_MANIPULATIONS"
+msgstr "* Page DOM manipulations: {}\n"
+
+msgid "TEXT_PAGE_SCROLL"
+msgstr "* Page scroll: {}\n"
+
+msgid "TEXT_PAGE_BAD_JS"
+msgstr "* Page use of bad JavaScript: {}\n"
+
+msgid "TEXT_PAGE_JQUERY"
+msgstr "* Page use of jQuery: {}\n"
+
+msgid "TEXT_PAGE_CSS_COMPLEXITY"
+msgstr "* Page CSS complexity: {}\n"
+
+msgid "TEXT_PAGE_BAD_CSS"
+msgstr "* Page use of bad CSS: {}\n"
+
+msgid "TEXT_PAGE_FONTS"
+msgstr "* Page use of custom webfonts: {}\n"
+
+msgid "TEXT_SERVER_CONFIG"
+msgstr "* Server configuration rating: {}\n"
\ No newline at end of file
diff --git a/locales/en/LC_MESSAGES/html_validator_w3c.mo b/locales/en/LC_MESSAGES/html_validator_w3c.mo
new file mode 100644
index 00000000..5151d1e8
Binary files /dev/null and b/locales/en/LC_MESSAGES/html_validator_w3c.mo differ
diff --git a/locales/en/LC_MESSAGES/html_validator_w3c.po b/locales/en/LC_MESSAGES/html_validator_w3c.po
new file mode 100644
index 00000000..74720505
--- /dev/null
+++ b/locales/en/LC_MESSAGES/html_validator_w3c.po
@@ -0,0 +1,34 @@
+# English (default).
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 10:15+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: mattias \n"
+"Language-Team: English \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nRunning test: 6 - HTML\n###############################"
+
+msgid "TEXT_REVIEW_HTML_VERY_GOOD"
+msgstr "* No errors in the HTML code.\n"
+
+msgid "TEXT_REVIEW_HTML_IS_GOOD"
+msgstr "* The tested page has {0} errors in its HTML code.\n"
+
+msgid "TEXT_REVIEW_HTML_IS_OK"
+msgstr "* The tested page has {0} errors in its HTML code.\n"
+
+msgid "TEXT_REVIEW_HTML_IS_BAD"
+msgstr "* The tested page has {0} errors in its HTML code. That is not so good.\n"
+
+msgid "TEXT_REVIEW_HTML_IS_VERY_BAD"
+msgstr "* The tested page has lots of errors in its HTML code. A total of {0}.\n"
diff --git a/locales/en/LC_MESSAGES/page_not_found.mo b/locales/en/LC_MESSAGES/page_not_found.mo
new file mode 100644
index 00000000..4b2ef7ec
Binary files /dev/null and b/locales/en/LC_MESSAGES/page_not_found.mo differ
diff --git a/locales/en/LC_MESSAGES/page_not_found.po b/locales/en/LC_MESSAGES/page_not_found.po
new file mode 100644
index 00000000..20d58781
--- /dev/null
+++ b/locales/en/LC_MESSAGES/page_not_found.po
@@ -0,0 +1,37 @@
+# English (default).
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 10:00+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: mattias \n"
+"Language-Team: English \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nRunning test: 2 - 404 (Page not Found)\n###############################"
+
+msgid "TEXT_REVIEW_WRONG_STATUS_CODE"
+msgstr "* Wrong status code. Got {0} when 404 would be correct.\n"
+
+msgid "TEXT_REVIEW_NO_TITLE"
+msgstr "* Found no page title in the page metadata.\n"
+
+msgid "TEXT_REVIEW_MAIN_HEADER"
+msgstr "* Found no headline (h1).\n"
+
+msgid "TEXT_REVIEW_NO_SWEDISH_ERROR_MSG"
+msgstr "* Seems to lack text describing that an error has occurred (in Swedish).\n"
+
+msgid "TEXT_REVIEW_ERROR_MSG_UNDER_150"
+msgstr "* Text content length is below 150 characters, indicating that the user is not being guided onward.\n"
+
+msgid "TEXT_REVIEW_NO_REMARKS"
+msgstr "* No remarks"
diff --git a/locales/en/LC_MESSAGES/performance_lighthouse.mo b/locales/en/LC_MESSAGES/performance_lighthouse.mo
new file mode 100644
index 00000000..fbb456ff
Binary files /dev/null and b/locales/en/LC_MESSAGES/performance_lighthouse.mo differ
diff --git a/locales/en/LC_MESSAGES/performance_lighthouse.po b/locales/en/LC_MESSAGES/performance_lighthouse.po
new file mode 100644
index 00000000..c71241cd
--- /dev/null
+++ b/locales/en/LC_MESSAGES/performance_lighthouse.po
@@ -0,0 +1,56 @@
+# English (default).
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 22:00+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: mattias \n"
+"Language-Team: English \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nRunning test: 1 - Performance (Google Lighthouse)\n###############################"
+
+msgid "TEXT_REVIEW_VERY_GOOD"
+msgstr "* The website loads very fast!\n"
+
+msgid "TEXT_REVIEW_IS_GOOD"
+msgstr "* The website is fast.\n"
+
+msgid "TEXT_REVIEW_IS_OK"
+msgstr "* Average speed.\n"
+
+msgid "TEXT_REVIEW_IS_BAD"
+msgstr "* The website is quite slow.\n"
+
+msgid "TEXT_REVIEW_IS_VERY_BAD"
+msgstr "* The website is very slow!\n"
+
+msgid "TEXT_REVIEW_OBSERVED_SPEED"
+msgstr "* Observed speed: {} seconds\n"
+
+msgid "TEXT_REVIEW_FIRST_MEANINGFUL_PAINT"
+msgstr "* First meaningful visual change: {} sec\n"
+
+msgid "TEXT_REVIEW_FIRST_MEANINGFUL_PAINT_3G"
+msgstr "* First meaningful visual change on 3G: {} sec\n"
+
+msgid "TEXT_REVIEW_CPU_IDLE"
+msgstr "* CPU idle after: {} sec\n"
+
+msgid "TEXT_REVIEW_INTERACTIVE"
+msgstr "* The website is interactive: {} sec\n"
+
+msgid "TEXT_REVIEW_REDIRECTS"
+msgstr "* Time spent for redirects: {} sec\n"
+
+msgid "TEXT_REVIEW_TOTAL_WEIGHT"
+msgstr "* Total weight of the page: {} kb\n"
+
diff --git a/locales/en/LC_MESSAGES/privacy_webbkollen.mo b/locales/en/LC_MESSAGES/privacy_webbkollen.mo
new file mode 100644
index 00000000..4c838ee1
Binary files /dev/null and b/locales/en/LC_MESSAGES/privacy_webbkollen.mo differ
diff --git a/locales/en/LC_MESSAGES/privacy_webbkollen.po b/locales/en/LC_MESSAGES/privacy_webbkollen.po
new file mode 100644
index 00000000..fa2ff0f4
--- /dev/null
+++ b/locales/en/LC_MESSAGES/privacy_webbkollen.po
@@ -0,0 +1,34 @@
+# English (default).
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 20:45+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: mattias \n"
+"Language-Team: English \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nRunning test: 20 - Privacy (Webbkollen)\n###############################"
+
+msgid "TEXT_REVIEW_VERY_GOOD"
+msgstr "* The website is good on privacy!\n"
+
+msgid "TEXT_REVIEW_IS_GOOD"
+msgstr "* The site could be better, but is OK.\n"
+
+msgid "TEXT_REVIEW_IS_OK"
+msgstr "* OK integrity, but it should get better.\n"
+
+msgid "TEXT_REVIEW_IS_BAD"
+msgstr "* Poor integrity.\n"
+
+msgid "TEXT_REVIEW_IS_VERY_BAD"
+msgstr "* Very bad privacy!\n"
diff --git a/locales/en/LC_MESSAGES/pwa_lighthouse.mo b/locales/en/LC_MESSAGES/pwa_lighthouse.mo
new file mode 100644
index 00000000..5414a1f2
Binary files /dev/null and b/locales/en/LC_MESSAGES/pwa_lighthouse.mo differ
diff --git a/locales/en/LC_MESSAGES/pwa_lighthouse.po b/locales/en/LC_MESSAGES/pwa_lighthouse.po
new file mode 100644
index 00000000..488f26b5
--- /dev/null
+++ b/locales/en/LC_MESSAGES/pwa_lighthouse.po
@@ -0,0 +1,39 @@
+# English
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 10:45+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: Marcus \n"
+"Language-Team: English \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nRunning test: 8 - PWA (Google Lighthouse)"
+
+msgid "TEXT_REVIEW_PWA_VERY_GOOD"
+msgstr "* The website is an all-out progressive web app!\n"
+
+msgid "TEXT_REVIEW_PWA_IS_GOOD"
+msgstr "* Pretty good at meeting the requirements of progressive web apps!\n"
+
+msgid "TEXT_REVIEW_PWA_IS_OK"
+msgstr "* About average support of Progressive Web App technology!\n"
+
+msgid "TEXT_REVIEW_PWA_IS_BAD"
+msgstr "* Pretty bad as a progressive web app (PWA)!\n"
+
+msgid "TEXT_REVIEW_PWA_IS_VERY_BAD"
+msgstr "* Really bad as a progressive web app (PWA) :/\n"
+
+msgid "TEXT_REVIEW_PWA_NUMBER_OF_PROBLEMS"
+msgstr "* Number of problems regarding progressive web apps: {}\n"
+
+msgid "TEXT_REVIEW_PWA_PROBLEMS"
+msgstr "\nProblem(s):\n"
\ No newline at end of file
diff --git a/locales/en/LC_MESSAGES/seo_lighthouse.mo b/locales/en/LC_MESSAGES/seo_lighthouse.mo
new file mode 100644
index 00000000..10554e80
Binary files /dev/null and b/locales/en/LC_MESSAGES/seo_lighthouse.mo differ
diff --git a/locales/en/LC_MESSAGES/seo_lighthouse.po b/locales/en/LC_MESSAGES/seo_lighthouse.po
new file mode 100644
index 00000000..b0cb9335
--- /dev/null
+++ b/locales/en/LC_MESSAGES/seo_lighthouse.po
@@ -0,0 +1,39 @@
+# English
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 10:45+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: Marcus \n"
+"Language-Team: English \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nRunning test: 4 - SEO (Google Lighthouse)"
+
+msgid "TEXT_REVIEW_SEO_VERY_GOOD"
+msgstr "* The website is really good at SEO!\n"
+
+msgid "TEXT_REVIEW_SEO_IS_GOOD"
+msgstr "* Pretty good at SEO, but still has some potential improvements.\n"
+
+msgid "TEXT_REVIEW_SEO_IS_OK"
+msgstr "* About average on search engine optimisation (SEO) according to Google.\n"
+
+msgid "TEXT_REVIEW_SEO_IS_BAD"
+msgstr "* Pretty badly optimised for search engines (SEO).\n"
+
+msgid "TEXT_REVIEW_SEO_IS_VERY_BAD"
+msgstr "* Really bad at SEO (Search Engine Optimisation)!\n"
+
+msgid "TEXT_REVIEW_SEO_NUMBER_OF_PROBLEMS"
+msgstr "* Number of problem(s) with search engine optimisation (SEO): {}\n"
+
+msgid "TEXT_REVIEW_SEO_PROBLEMS"
+msgstr "\nProblem(s):\n"
\ No newline at end of file
diff --git a/locales/en/LC_MESSAGES/standard_files.mo b/locales/en/LC_MESSAGES/standard_files.mo
new file mode 100644
index 00000000..c1e987ed
Binary files /dev/null and b/locales/en/LC_MESSAGES/standard_files.mo differ
diff --git a/locales/en/LC_MESSAGES/standard_files.po b/locales/en/LC_MESSAGES/standard_files.po
new file mode 100644
index 00000000..1c6039c5
--- /dev/null
+++ b/locales/en/LC_MESSAGES/standard_files.po
@@ -0,0 +1,58 @@
+# English (default).
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 11:15+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: mattias \n"
+"Language-Team: English \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nRunning test: 9 - Standard files\n###############################"
+
+msgid "TEXT_ROBOTS_MISSING"
+msgstr "* robots.txt is missing, could not be fetched, or lacks the expected content.\n"
+
+msgid "TEXT_ROBOTS_OK"
+msgstr "* robots.txt seems ok.\n"
+
+msgid "TEXT_SITEMAP_MISSING"
+msgstr "* Sitemap is not specified in robots.txt.\n"
+
+msgid "TEXT_SITEMAP_FOUND"
+msgstr "* Sitemap is mentioned in robots.txt.\n"
+
+msgid "TEXT_SITEMAP_BROKEN"
+msgstr "* Sitemap seems to be broken.\n"
+
+msgid "TEXT_SITEMAP_OK"
+msgstr "* Sitemap seems to work.\n"
+
+msgid "TEXT_RSS_FEED_MISSING"
+msgstr "* RSS subscription is missing in meta.\n"
+
+msgid "TEXT_RSS_FEED_FOUND"
+msgstr "* RSS subscription found.\n"
+
+msgid "TEXT_SECURITY_MISSING"
+msgstr "* security.txt is missing.\n"
+
+msgid "TEXT_SECURITY_WRONG_CONTENT"
+msgstr "* security.txt has wrong content.\n"
+
+msgid "TEXT_SECURITY_OK_CONTENT"
+msgstr "* security.txt seems to work.\n"
+
+msgid "TEXT_SECURITY_REQUIRED_CONTACT_MISSING"
+msgstr "* security.txt is missing the required contact field.\n"
+
+msgid "TEXT_SECURITY_REQUIRED_EXPIRES_MISSING"
+msgstr "* security.txt is missing the required expires field (added in draft v10).\n"
diff --git a/locales/en/LC_MESSAGES/webperf-core.mo b/locales/en/LC_MESSAGES/webperf-core.mo
index 04e822fc..6daa918d 100644
Binary files a/locales/en/LC_MESSAGES/webperf-core.mo and b/locales/en/LC_MESSAGES/webperf-core.mo differ
diff --git a/locales/en/LC_MESSAGES/webperf-core.po b/locales/en/LC_MESSAGES/webperf-core.po
index 83907840..04b0c8d6 100644
--- a/locales/en/LC_MESSAGES/webperf-core.po
+++ b/locales/en/LC_MESSAGES/webperf-core.po
@@ -15,103 +15,88 @@ msgstr ""
 "Generated-By: pygettext.py 1.5\n"
 
-#: checks.py:47
-msgid "TEST_404_REVIEW_WRONG_STATUS_CODE"
-msgstr "* Wrong status code. Got {0} when 404 would be correct.\n"
+msgid "TEXT_WEBSITE_URL_ADDED"
+msgstr "website with url: {0} has been added\n"
 
-#: checks.py:67
-msgid "TEST_404_REVIEW_NO_TITLE"
-msgstr "* Found no page title in the page metadata.\n"
+msgid "TEXT_WEBSITE_URL_DELETED"
+msgstr "website with url: {0} has been deleted\n"
 
-#: checks.py:77
-msgid "TEST_404_REVIEW_MAIN_HEADER"
-msgstr "* Found no headline (h1)\n"
-
-#: checks.py:135
-msgid "TEST_404_REVIEW_NO_SWEDISH_ERROR_MSG"
-msgstr "* Seems to lack text describing that an error has occurred (in Swedish).\n"
-
-#: checks.py:143
-msgid "TEST_404_REVIEW_ERROR_MSG_UNDER_150"
-msgstr "* Text content length is below 150 characters, indicating that the user is not being referred.\n"
-
-#: checks.py:146
-msgid "TEST_REVIEW_NO_REMARKS"
-msgstr "* No remarks"
-
-#: default.py:21
+#: default.py:24
 msgid "TEXT_TEST_START_HEADER"
 msgstr "###############################################"
 
-#: default.py:25
+#: default.py:28
 msgid "TEXT_TESTING_NUMBER_OF_SITES"
 msgstr "Number of websites being tested {0}"
 
-#: default.py:30
+#: default.py:33
 msgid "TEXT_TESTING_SITE"
 msgstr "{0}. Testing website {1}"
 
-#: default.py:46
+#: default.py:61
 msgid "TEXT_SITE_RATING"
 msgstr "Rating: "
 
-#: default.py:48
+#: default.py:63
 msgid "TEXT_SITE_REVIEW"
 msgstr "Review:\n"
 
-#: default.py:66
+#: default.py:81
 msgid "TEXT_EXCEPTION"
 msgstr "Exception, someone should look at this!"
 
-#: default.py:74
+#: default.py:89
 msgid "TEXT_TESTING_START_HEADER"
 msgstr "### {0} ###"
 
-#: default.py:78
-msgid "TEXT_TEST_GOOGLE_PAGESPEED"
-msgstr "###############################\nRunning test: 0 - Google Pagespeed"
+#: default.py:127
+msgid "TEXT_TEST_VALID_ARGUMENTS"
+msgstr "Valid arguments for option -t/--test:"
 
-#: default.py:81
-msgid "TEXT_TEST_PAGE_NOT_FOUND"
-msgstr "###############################\nRunning test: 2 - 404 (Page not Found)"
+#: default.py:128
+msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE"
+msgstr "-t 1\t: Performance (Google Lighthouse)"
 
-#: default.py:84
-msgid "TEXT_TEST_HTML"
-msgstr "###############################\nRunning test: 6 - HTML"
+#: default.py:129
+msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_SEO"
+msgstr "-t 4\t: SEO (Google Lighthouse)"
 
-#: default.py:87
-msgid "TEXT_TEST_CSS"
-msgstr "###############################\nRunning test: 7 - CSS"
+#: default.py:130
+msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_A11Y"
+msgstr "-t 10\t: Accessibility (Google Lighthouse)"
 
-#: default.py:90
-msgid "TEXT_TEST_WEBBKOLL"
-msgstr "###############################\nRunning test: 20 - Webbkoll"
+#: default.py:131
+msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_PWA"
+msgstr "-t 8\t: PWA (Google Lighthouse)"
 
-#: default.py:96
-msgid "TEXT_TEST_VALID_ARGUMENTS"
-msgstr "Valid arguments for option -t/--test:"
+#: default.py:132
+msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_BEST_PRACTICE"
+msgstr "-t 5\t: Best Practice (Google Lighthouse)"
 
-#: default.py:97
-msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_PAGESPEED"
-msgstr "-t 0\t: Google Pagespeed"
+msgid "TEXT_TEST_VALID_ARGUMENTS_YELLOW_LAB_TOOLS"
+msgstr "-t 17\t: Quality on frontend (Yellow Lab Tools)"
 
-#: default.py:98
+#: default.py:133
 msgid "TEXT_TEST_VALID_ARGUMENTS_PAGE_NOT_FOUND"
"TEXT_TEST_VALID_ARGUMENTS_PAGE_NOT_FOUND" msgstr "-t 2\t: 404 (Page not Found)" -#: default.py:99 +#: default.py:134 msgid "TEXT_TEST_VALID_ARGUMENTS_HTML" -msgstr "-t 6\t: HTML" +msgstr "-t 6\t: HTML Validation" -#: default.py:100 +#: default.py:135 msgid "TEXT_TEST_VALID_ARGUMENTS_CSS" -msgstr "-t 7\t: CSS" +msgstr "-t 7\t: CSS Validation" -#: default.py:101 +#: default.py:136 msgid "TEXT_TEST_VALID_ARGUMENTS_WEBBKOLL" -msgstr "-t 20\t: Webbkoll" +msgstr "-t 20\t: Integrity & Security (Webbkoll)" + +#: default.py:137 +msgid "TEXT_TEST_VALID_ARGUMENTS_STANDARD_FILES" +msgstr "-t 9\t: Standard files" #: default.py:137 default.py:141 default.py:146 default.py:213 msgid "TEXT_COMMAND_USAGE" -msgstr "\n\tWebPerf Core\n\n\tUsage:\ndefault.py -u https://webperf.se\n\n\tOptions and arguments:\n\t-h/--help\t\t\t: Help information on how to use script\n\t-u/--url \t\t: website url to test against\n\t-t/--test <1/2/6/7/20>\t\t: runs ONE specific test against website(s)\n\t-r/--review\t\t\t: show reviews in terminal\n\t-i/--input \t\t: input file path (.json/.sqlite)\n\t-o/--output \t\t: output file path (.json/.csv/.sql/.sqlite)\n\t-A/--addUrl \t\t: website url (required in compination with -i/--input)\n\t-D/--deleteUrl \t: website url (required in compination with -i/--input)\n\t-L/--language \t: language used for output(en = default/sv)" +msgstr "\n\tWebPerf Core\n\n\tUsage:\ndefault.py -u https://webperf.se\n\n\tOptions and arguments:\n\t-h/--help\t\t\t: Help information on how to use script\n\t-u/--url \t\t: website url to test against\n\t-t/--test \t\t: run ONE test (use ? to list available tests)\n\t-r/--review\t\t\t: show reviews in terminal\n\t-i/--input \t\t: input file path (.json/.sqlite)\n\t-i/--input-skip \t: number of items to skip\n\t-i/--input-take \t: number of items to take\n\t-o/--output \t\t: output file path (.json/.csv/.sql/.sqlite)\n\t-A/--addUrl \t\t: website url (required in compination with -i/--input)\n\t-D/--deleteUrl \t: website url (required in compination with -i/--input)\n\t-L/--language \t: language used for output(en = default/sv)" diff --git a/locales/sv/LC_MESSAGES/a11y_lighthouse.mo b/locales/sv/LC_MESSAGES/a11y_lighthouse.mo new file mode 100644 index 00000000..586ea905 Binary files /dev/null and b/locales/sv/LC_MESSAGES/a11y_lighthouse.mo differ diff --git a/locales/sv/LC_MESSAGES/a11y_lighthouse.po b/locales/sv/LC_MESSAGES/a11y_lighthouse.po new file mode 100644 index 00000000..ec59bcc7 --- /dev/null +++ b/locales/sv/LC_MESSAGES/a11y_lighthouse.po @@ -0,0 +1,36 @@ +# Swedish +# Copyright (C) 2020 WebPerf +# FIRST AUTHOR , 2020. 
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 10:45+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: Marcus \n"
+"Language-Team: Swedish \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nKör test: 10 - Tillgänglighet (Google Lighthouse)"
+
+msgid "TEXT_REVIEW_A11Y_VERY_GOOD"
+msgstr "* Webbplatsen har inga uppenbara fel inom tillgänglighet!\n"
+
+msgid "TEXT_REVIEW_A11Y_IS_GOOD"
+msgstr "* Webbplatsen kan bli mer tillgänglig, men är helt ok.\n"
+
+msgid "TEXT_REVIEW_A11Y_IS_OK"
+msgstr "* Genomsnittlig tillgänglighet, men behöver bli bättre.\n"
+
+msgid "TEXT_REVIEW_A11Y_IS_BAD"
+msgstr "* Webbplatsen är dålig på tillgänglighet för funktionsvarierade personer.\n"
+
+msgid "TEXT_REVIEW_A11Y_IS_VERY_BAD"
+msgstr "* Väldigt dålig tillgänglighet!\n"
+
+msgid "TEXT_REVIEW_A11Y_NUMBER_OF_PROBLEMS"
+msgstr "* Antal problem med tillgänglighet: {} st\n"
\ No newline at end of file
diff --git a/locales/sv/LC_MESSAGES/best_practice_lighthouse.mo b/locales/sv/LC_MESSAGES/best_practice_lighthouse.mo
new file mode 100644
index 00000000..0ee4e4e4
Binary files /dev/null and b/locales/sv/LC_MESSAGES/best_practice_lighthouse.mo differ
diff --git a/locales/sv/LC_MESSAGES/best_practice_lighthouse.po b/locales/sv/LC_MESSAGES/best_practice_lighthouse.po
new file mode 100644
index 00000000..6cb88a0b
--- /dev/null
+++ b/locales/sv/LC_MESSAGES/best_practice_lighthouse.po
@@ -0,0 +1,39 @@
+# Swedish
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 10:45+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: Marcus \n"
+"Language-Team: Swedish \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nKör test: 5 - God praxis (Google Lighthouse)"
+
+msgid "TEXT_REVIEW_PRACTICE_VERY_GOOD"
+msgstr "* Webbplatsen följer god praxis fullt ut!\n"
+
+msgid "TEXT_REVIEW_PRACTICE_IS_GOOD"
+msgstr "* Webbplatsen har ändå förbättringspotential.\n"
+
+msgid "TEXT_REVIEW_PRACTICE_IS_OK"
+msgstr "* Genomsnittlig efterlevnad till praxis.\n"
+
+msgid "TEXT_REVIEW_PRACTICE_IS_BAD"
+msgstr "* Webbplatsen är ganska dålig på att följa god praxis.\n"
+
+msgid "TEXT_REVIEW_PRACTICE_IS_VERY_BAD"
+msgstr "* Webbplatsen är inte alls bra på att följa praxis!\n"
+
+msgid "TEXT_REVIEW_PRACTICE_NUMBER_OF_PROBLEMS"
+msgstr "* Antal problem med god praxis: {} st\n"
+
+msgid "TEXT_REVIEW_PRACTICE_PROBLEMS"
+msgstr "\nProblem:\n"
\ No newline at end of file
diff --git a/locales/sv/LC_MESSAGES/css_validator_w3c.mo b/locales/sv/LC_MESSAGES/css_validator_w3c.mo
new file mode 100644
index 00000000..fab77dda
Binary files /dev/null and b/locales/sv/LC_MESSAGES/css_validator_w3c.mo differ
diff --git a/locales/sv/LC_MESSAGES/css_validator_w3c.po b/locales/sv/LC_MESSAGES/css_validator_w3c.po
new file mode 100644
index 00000000..4aa24695
--- /dev/null
+++ b/locales/sv/LC_MESSAGES/css_validator_w3c.po
@@ -0,0 +1,34 @@
+# Swedish
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 10:45+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: mattias \n"
+"Language-Team: Swedish \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nKör test: 7 - CSS\n###############################"
+
+msgid "TEXT_REVIEW_CSS_VERY_GOOD"
+msgstr "* Inga fel i CSS-koden.\n"
+
+msgid "TEXT_REVIEW_CSS_IS_GOOD"
+msgstr "* Den testade sidan har {0} st fel i sin CSS-kod.\n"
+
+msgid "TEXT_REVIEW_CSS_IS_OK"
+msgstr "* Den testade sidan har {0} st fel i sin CSS-kod.\n"
+
+msgid "TEXT_REVIEW_CSS_IS_BAD"
+msgstr "* Den testade sidan har {0} st fel i sin CSS-kod. Det är inte så bra.\n"
+
+msgid "TEXT_REVIEW_CSS_IS_VERY_BAD"
+msgstr "* Den testade sidan har massor med fel i sin CSS-kod. Hela {0} st.\n"
diff --git a/locales/sv/LC_MESSAGES/frontend_quality_yellow_lab_tools.mo b/locales/sv/LC_MESSAGES/frontend_quality_yellow_lab_tools.mo
new file mode 100644
index 00000000..1957f4ac
Binary files /dev/null and b/locales/sv/LC_MESSAGES/frontend_quality_yellow_lab_tools.mo differ
diff --git a/locales/sv/LC_MESSAGES/frontend_quality_yellow_lab_tools.po b/locales/sv/LC_MESSAGES/frontend_quality_yellow_lab_tools.po
new file mode 100644
index 00000000..25063814
--- /dev/null
+++ b/locales/sv/LC_MESSAGES/frontend_quality_yellow_lab_tools.po
@@ -0,0 +1,73 @@
+# Swedish
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR <EMAIL@ADDRESS>, 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-11 16:14+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: mattias \n"
+"Language-Team: Swedish \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nKör test: 17 - Kvalitet på frontend (Yellow Lab Tools)\n###############################"
+
+msgid "TEXT_WEBSITE_IS_VERY_GOOD"
+msgstr "* Webbplatsen är välbyggd!\n"
+
+msgid "TEXT_WEBSITE_IS_GOOD"
+msgstr "* Webbplatsen är bra.\n"
+
+msgid "TEXT_WEBSITE_IS_OK"
+msgstr "* Helt ok.\n"
+
+msgid "TEXT_WEBSITE_IS_BAD"
+msgstr "* Webbplatsen är rätt långsam eller har dålig frontend-kod.\n"
+
+msgid "TEXT_WEBSITE_IS_VERY_BAD"
+msgstr "* Väldigt dåligt betyg enligt Yellow Lab Tools!\n"
+
+msgid "TEXT_OVERALL_GRADE"
+msgstr "* Övergripande betyg: {} av 100\n"
+
+msgid "TEXT_TESTED_ON_DEVICETYPE"
+msgstr "* Testat för enhetstyp: {}\n"
+
+msgid "TEXT_PAGE_WEIGHT"
+msgstr "* Sidans storlek: {}\n"
+
+msgid "TEXT_PAGE_REQUESTS"
+msgstr "* Antal resurser för sidan: {}\n"
+
+msgid "TEXT_PAGE_DOM_COMPLEXITY"
+msgstr "* Sidans DOM-komplexitet: {}\n"
+
+msgid "TEXT_PAGE_DOM_MANIPULATIONS"
+msgstr "* Förändringar av sidans DOM: {}\n"
+
+msgid "TEXT_PAGE_SCROLL"
+msgstr "* Sidskrollning: {}\n"
+
+msgid "TEXT_PAGE_BAD_JS"
+msgstr "* Sidans användning av dålig JavaScript: {}\n"
+
+msgid "TEXT_PAGE_JQUERY"
+msgstr "* Sidans användning av jQuery: {}\n"
+
+msgid "TEXT_PAGE_CSS_COMPLEXITY"
+msgstr "* Sidans CSS-komplexitet: {}\n"
+
+msgid "TEXT_PAGE_BAD_CSS"
+msgstr "* Sidans användning av dålig CSS: {}\n"
+
+msgid "TEXT_PAGE_FONTS"
+msgstr "* Sidans användning av webbtypsnitt: {}\n"
+
+msgid "TEXT_SERVER_CONFIG"
+msgstr "* Serverkonfiguration: {}\n"
\ No newline at end of file
diff --git a/locales/sv/LC_MESSAGES/html_validator_w3c.mo b/locales/sv/LC_MESSAGES/html_validator_w3c.mo
a/locales/sv/LC_MESSAGES/html_validator_w3c.mo b/locales/sv/LC_MESSAGES/html_validator_w3c.mo new file mode 100644 index 00000000..af0eb060 Binary files /dev/null and b/locales/sv/LC_MESSAGES/html_validator_w3c.mo differ diff --git a/locales/sv/LC_MESSAGES/html_validator_w3c.po b/locales/sv/LC_MESSAGES/html_validator_w3c.po new file mode 100644 index 00000000..8a9bae82 --- /dev/null +++ b/locales/sv/LC_MESSAGES/html_validator_w3c.po @@ -0,0 +1,34 @@ +# Swedish +# Copyright (C) 2020 WebPerf +# FIRST AUTHOR , 2020. +# +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"POT-Creation-Date: 2020-12-12 10:15+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: mattias \n" +"Language-Team: Swedish \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" + + +msgid "TEXT_RUNNING_TEST" +msgstr "###############################\nKör test: 6 - HTML\n###############################" + +msgid "TEXT_REVIEW_HTML_VERY_GOOD" +msgstr "* Inga fel i HTML-koden.\n" + +msgid "TEXT_REVIEW_HTML_IS_GOOD" +msgstr "* Den testade sidan har {0} st fel i sin HTML-kod.\n" + +msgid "TEXT_REVIEW_HTML_IS_OK" +msgstr "* Den testade sidan har {0} st fel i sin HTML-kod.\n" + +msgid "TEXT_REVIEW_HTML_IS_BAD" +msgstr "* Den testade sidan har {0} st fel i sin HTML-kod. Det är inte så bra.\n" + +msgid "TEXT_REVIEW_HTML_IS_VERY_BAD" +msgstr "* Den testade sidan har massor med fel i sin HTML-kod. Hela {0} st. \n" diff --git a/locales/sv/LC_MESSAGES/page_not_found.mo b/locales/sv/LC_MESSAGES/page_not_found.mo new file mode 100644 index 00000000..66977ce4 Binary files /dev/null and b/locales/sv/LC_MESSAGES/page_not_found.mo differ diff --git a/locales/sv/LC_MESSAGES/page_not_found.po b/locales/sv/LC_MESSAGES/page_not_found.po new file mode 100644 index 00000000..cd76808d --- /dev/null +++ b/locales/sv/LC_MESSAGES/page_not_found.po @@ -0,0 +1,37 @@ +# Swedish +# Copyright (C) 2020 WebPerf +# FIRST AUTHOR , 2020. +# +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"POT-Creation-Date: 2020-12-12 10:00+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: mattias \n" +"Language-Team: Swedish \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" + + +msgid "TEXT_RUNNING_TEST" +msgstr "###############################\nKör test: 2 - 404 (sida finns inte)\n###############################" + +msgid "TEXT_REVIEW_WRONG_STATUS_CODE" +msgstr "* Fel statuskod. Fick {0} när 404 vore korrekt.\n" + +msgid "TEXT_REVIEW_NO_TITLE" +msgstr "* Hittade ingen titel på sidan\n" + +msgid "TEXT_REVIEW_MAIN_HEADER" +msgstr "* Hittade ingen huvudrubrik (h1)\n" + +msgid "TEXT_REVIEW_NO_SWEDISH_ERROR_MSG" +msgstr "* Verkar sakna text som beskriver att ett fel uppstått (på svenska).\n" + +msgid "TEXT_REVIEW_ERROR_MSG_UNDER_150" +msgstr "* Information är under 150 tecken, vilket tyder på att användaren inte vägleds vidare.\n" + +msgid "TEXT_REVIEW_NO_REMARKS" +msgstr "* Inga anmärkningar." 
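A note on how the catalogs above are consumed: each test module resolves its review texts through gettext, using the .po/.mo pair whose file name matches the module's translation domain. The following is a minimal illustrative sketch (not part of this patch), assuming the catalog has been compiled into locales/sv/LC_MESSAGES/, for example with GNU msgfmt:

# Minimal sketch: how a test module picks up the Swedish texts added above.
# Assumes locales/sv/LC_MESSAGES/page_not_found.mo exists, e.g. compiled with:
#   msgfmt -o locales/sv/LC_MESSAGES/page_not_found.mo locales/sv/LC_MESSAGES/page_not_found.po
import gettext

langCode = 'sv'  # the same language code default.py passes to run_test()

# The first argument is the translation domain; it must match the catalog
# file name (here 'page_not_found', as in tests/page_not_found.py).
language = gettext.translation(
    'page_not_found', localedir='locales', languages=[langCode])
language.install()
_ = language.gettext

# Source code only carries keys; the catalog supplies the wording:
print(_('TEXT_REVIEW_WRONG_STATUS_CODE').format(200))
# -> "* Fel statuskod. Fick 200 när 404 vore korrekt."

This is why every new test module in this change ships both a .po (source) and a .mo (compiled) file.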
diff --git a/locales/sv/LC_MESSAGES/performance_lighthouse.mo b/locales/sv/LC_MESSAGES/performance_lighthouse.mo new file mode 100644 index 00000000..064e25c1 Binary files /dev/null and b/locales/sv/LC_MESSAGES/performance_lighthouse.mo differ diff --git a/locales/sv/LC_MESSAGES/performance_lighthouse.po b/locales/sv/LC_MESSAGES/performance_lighthouse.po new file mode 100644 index 00000000..cce50f71 --- /dev/null +++ b/locales/sv/LC_MESSAGES/performance_lighthouse.po @@ -0,0 +1,56 @@ +# Swedish +# Copyright (C) 2020 WebPerf +# FIRST AUTHOR , 2020. +# +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"POT-Creation-Date: 2020-12-12 22:00+0200\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: mattias \n" +"Language-Team: Swedish \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Generated-By: pygettext.py 1.5\n" + + +msgid "TEXT_RUNNING_TEST" +msgstr "###############################\nKör test: 1 - Prestanda (Google Lighthouse)\n###############################" + +msgid "TEXT_REVIEW_VERY_GOOD" +msgstr "* Webbplatsen laddar in mycket snabbt!\n" + +msgid "TEXT_REVIEW_IS_GOOD" +msgstr "* Webbplatsen är snabb.\n" + +msgid "TEXT_REVIEW_IS_OK" +msgstr "* Genomsnittlig hastighet.\n" + +msgid "TEXT_REVIEW_IS_BAD" +msgstr "* Webbplatsen är ganska långsam.\n" + +msgid "TEXT_REVIEW_IS_VERY_BAD" +msgstr "* Webbplatsen är väldigt långsam!\n" + +msgid "TEXT_REVIEW_OBSERVED_SPEED" +msgstr "* Observerad hastighet: {} sekunder\n" + +msgid "TEXT_REVIEW_FIRST_MEANINGFUL_PAINT" +msgstr "* Första meningsfulla visuella ändring: {} sek\n" + +msgid "TEXT_REVIEW_FIRST_MEANINGFUL_PAINT_3G" +msgstr "* Första meningsfulla visuella ändring på 3G: {} sek\n" + +msgid "TEXT_REVIEW_CPU_IDLE" +msgstr "* CPU vilar efter: {} sek\n" + +msgid "TEXT_REVIEW_INTERACTIVE" +msgstr "* Webbplatsen är interaktiv: {} sek\n" + +msgid "TEXT_REVIEW_REDIRECTS" +msgstr "* Tid spenderad på hänvisningar: {} sek\n" + +msgid "TEXT_REVIEW_TOTAL_WEIGHT" +msgstr "* Sidans totala vikt: {} kb\n" + diff --git a/locales/sv/LC_MESSAGES/privacy_webbkollen.mo b/locales/sv/LC_MESSAGES/privacy_webbkollen.mo new file mode 100644 index 00000000..6e308b90 Binary files /dev/null and b/locales/sv/LC_MESSAGES/privacy_webbkollen.mo differ diff --git a/locales/sv/LC_MESSAGES/privacy_webbkollen.po b/locales/sv/LC_MESSAGES/privacy_webbkollen.po new file mode 100644 index 00000000..b8c70873 --- /dev/null +++ b/locales/sv/LC_MESSAGES/privacy_webbkollen.po @@ -0,0 +1,34 @@ +# Swedish +# Copyright (C) 2020 WebPerf +# FIRST AUTHOR , 2020. 
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 20:45+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: mattias \n"
+"Language-Team: Swedish \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nKör test: 20 - Privacy (Webbkollen)\n###############################"
+
+msgid "TEXT_REVIEW_VERY_GOOD"
+msgstr "* Webbplatsen är bra på integritet!\n"
+
+msgid "TEXT_REVIEW_IS_GOOD"
+msgstr "* Webbplatsen kan bli bättre, men är helt ok.\n"
+
+msgid "TEXT_REVIEW_IS_OK"
+msgstr "* Ok integritet men borde bli bättre.\n"
+
+msgid "TEXT_REVIEW_IS_BAD"
+msgstr "* Dålig integritet.\n"
+
+msgid "TEXT_REVIEW_IS_VERY_BAD"
+msgstr "* Väldigt dålig integritet!\n"
diff --git a/locales/sv/LC_MESSAGES/pwa_lighthouse.mo b/locales/sv/LC_MESSAGES/pwa_lighthouse.mo
new file mode 100644
index 00000000..f256d0de
Binary files /dev/null and b/locales/sv/LC_MESSAGES/pwa_lighthouse.mo differ
diff --git a/locales/sv/LC_MESSAGES/pwa_lighthouse.po b/locales/sv/LC_MESSAGES/pwa_lighthouse.po
new file mode 100644
index 00000000..9f09365f
--- /dev/null
+++ b/locales/sv/LC_MESSAGES/pwa_lighthouse.po
@@ -0,0 +1,39 @@
+# Swedish
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR , 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 10:45+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: Marcus \n"
+"Language-Team: Swedish \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nKör test: 8 - Progressiv webbapp (Google Lighthouse)"
+
+msgid "TEXT_REVIEW_PWA_VERY_GOOD"
+msgstr "* Webbplatsen följer fullt ut praxis för progressiva webbappar!\n"
+
+msgid "TEXT_REVIEW_PWA_IS_GOOD"
+msgstr "* Webbplatsen har lite förbättringspotential för en progressiv webbapp.\n"
+
+msgid "TEXT_REVIEW_PWA_IS_OK"
+msgstr "* Genomsnittlig efterlevnad till praxis för progressiva webbappar.\n"
+
+msgid "TEXT_REVIEW_PWA_IS_BAD"
+msgstr "* Webbplatsen är ganska dålig som progressiv webbapp.\n"
+
+msgid "TEXT_REVIEW_PWA_IS_VERY_BAD"
+msgstr "* Webbplatsen är inte alls bra som progressiv webbapp :/\n"
+
+msgid "TEXT_REVIEW_PWA_NUMBER_OF_PROBLEMS"
+msgstr "* Antal problem med praxis för progressiva webbappar: {} st\n"
+
+msgid "TEXT_REVIEW_PWA_PROBLEMS"
+msgstr "\nProblem:\n"
\ No newline at end of file
diff --git a/locales/sv/LC_MESSAGES/seo_lighthouse.mo b/locales/sv/LC_MESSAGES/seo_lighthouse.mo
new file mode 100644
index 00000000..05461b35
Binary files /dev/null and b/locales/sv/LC_MESSAGES/seo_lighthouse.mo differ
diff --git a/locales/sv/LC_MESSAGES/seo_lighthouse.po b/locales/sv/LC_MESSAGES/seo_lighthouse.po
new file mode 100644
index 00000000..4c806d66
--- /dev/null
+++ b/locales/sv/LC_MESSAGES/seo_lighthouse.po
@@ -0,0 +1,39 @@
+# Swedish
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR , 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 10:45+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: Marcus \n"
+"Language-Team: Swedish \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nKör test: 4 - Sökmotoroptimering (Google Lighthouse)"
+
+msgid "TEXT_REVIEW_SEO_VERY_GOOD"
+msgstr "* Webbplatsen följer god SEO-praxis fullt ut!\n"
+
+msgid "TEXT_REVIEW_SEO_IS_GOOD"
+msgstr "* Webbplatsen har ändå förbättringspotential inom SEO.\n"
+
+msgid "TEXT_REVIEW_SEO_IS_OK"
+msgstr "* Genomsnittlig efterlevnad till SEO-praxis.\n"
+
+msgid "TEXT_REVIEW_SEO_IS_BAD"
+msgstr "* Webbplatsen är ganska dålig på sökmotoroptimering.\n"
+
+msgid "TEXT_REVIEW_SEO_IS_VERY_BAD"
+msgstr "* Webbplatsen är inte alls bra på sökmotoroptimering!\n"
+
+msgid "TEXT_REVIEW_SEO_NUMBER_OF_PROBLEMS"
+msgstr "* Antal problem med sökmotoroptimering (SEO): {} st\n"
+
+msgid "TEXT_REVIEW_SEO_PROBLEMS"
+msgstr "\nProblem:\n"
\ No newline at end of file
diff --git a/locales/sv/LC_MESSAGES/standard_files.mo b/locales/sv/LC_MESSAGES/standard_files.mo
new file mode 100644
index 00000000..45c60bdd
Binary files /dev/null and b/locales/sv/LC_MESSAGES/standard_files.mo differ
diff --git a/locales/sv/LC_MESSAGES/standard_files.po b/locales/sv/LC_MESSAGES/standard_files.po
new file mode 100644
index 00000000..7fe8e623
--- /dev/null
+++ b/locales/sv/LC_MESSAGES/standard_files.po
@@ -0,0 +1,58 @@
+# Swedish
+# Copyright (C) 2020 WebPerf
+# FIRST AUTHOR , 2020.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"POT-Creation-Date: 2020-12-12 11:15+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: mattias \n"
+"Language-Team: Swedish \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: pygettext.py 1.5\n"
+
+
+msgid "TEXT_RUNNING_TEST"
+msgstr "###############################\nKör test: 9 - Standardfiler\n###############################"
+
+msgid "TEXT_ROBOTS_MISSING"
+msgstr "* robots.txt saknas, får inte lov att hämtas eller har inte förväntat innehåll.\n"
+
+msgid "TEXT_ROBOTS_OK"
+msgstr "* robots.txt verkar ok.\n"
+
+msgid "TEXT_SITEMAP_MISSING"
+msgstr "* Sitemap anges inte i robots.txt\n"
+
+msgid "TEXT_SITEMAP_FOUND"
+msgstr "* Sitemap finns omnämnd i robots.txt\n"
+
+msgid "TEXT_SITEMAP_BROKEN"
+msgstr "* Sitemap verkar vara trasig.\n"
+
+msgid "TEXT_SITEMAP_OK"
+msgstr "* Sitemap verkar fungera.\n"
+
+msgid "TEXT_RSS_FEED_MISSING"
+msgstr "* RSS-prenumeration saknas i meta.\n"
+
+msgid "TEXT_RSS_FEED_FOUND"
+msgstr "* RSS-prenumeration hittad.\n"
+
+msgid "TEXT_SECURITY_MISSING"
+msgstr "* security.txt saknas.\n"
+
+msgid "TEXT_SECURITY_WRONG_CONTENT"
+msgstr "* security.txt har inte förväntat innehåll.\n"
+
+msgid "TEXT_SECURITY_OK_CONTENT"
+msgstr "* security.txt verkar ok.\n"
+
+msgid "TEXT_SECURITY_REQUIRED_CONTACT_MISSING"
+msgstr "* security.txt kontaktfält saknas.\n"
+
+msgid "TEXT_SECURITY_REQUIRED_EXPIRES_MISSING"
+msgstr "* security.txt fält för utgångstid saknas (krav tillagt i utkast v10).\n"
diff --git a/locales/sv/LC_MESSAGES/webperf-core.mo b/locales/sv/LC_MESSAGES/webperf-core.mo
index 02c37e7e..33df8cb3 100644
Binary files a/locales/sv/LC_MESSAGES/webperf-core.mo and b/locales/sv/LC_MESSAGES/webperf-core.mo differ
diff --git a/locales/sv/LC_MESSAGES/webperf-core.po 
b/locales/sv/LC_MESSAGES/webperf-core.po index fc21f098..5879334d 100644 --- a/locales/sv/LC_MESSAGES/webperf-core.po +++ b/locales/sv/LC_MESSAGES/webperf-core.po @@ -15,103 +15,88 @@ msgstr "" "Generated-By: pygettext.py 1.5\n" -#: checks.py:47 -msgid "TEST_404_REVIEW_WRONG_STATUS_CODE" -msgstr "* Fel statuskod. Fick {0} när 404 vore korrekt.\n" +msgid "TEXT_WEBSITE_URL_ADDED" +msgstr "webbplats med adress: {0} har blivit tillagd\n" -#: checks.py:67 -msgid "TEST_404_REVIEW_NO_TITLE" -msgstr "* Hittade ingen titel på sidan\n" +msgid "TEXT_WEBSITE_URL_DELETED" +msgstr "webbplats med adress: {0} har blivit borttagen\n" -#: checks.py:77 -msgid "TEST_404_REVIEW_MAIN_HEADER" -msgstr "* Hittade ingen huvudrubrik (h1)\n" - -#: checks.py:135 -msgid "TEST_404_REVIEW_NO_SWEDISH_ERROR_MSG" -msgstr "* Verkar sakna text som beskriver att ett fel uppstått (på svenska).\n" - -#: checks.py:143 -msgid "TEST_404_REVIEW_ERROR_MSG_UNDER_150" -msgstr "* Information är under 150 tecken, vilket tyder på att användaren inte vägleds vidare.\n" - -#: checks.py:146 -msgid "TEST_REVIEW_NO_REMARKS" -msgstr "* Inga anmärkningar." - -#: default.py:21 +#: default.py:24 msgid "TEXT_TEST_START_HEADER" msgstr "###############################################" -#: default.py:25 +#: default.py:28 msgid "TEXT_TESTING_NUMBER_OF_SITES" msgstr "Webbadresser som testas {0}" -#: default.py:30 +#: default.py:33 msgid "TEXT_TESTING_SITE" msgstr "{0}. Testar adress {1}" -#: default.py:46 +#: default.py:61 msgid "TEXT_SITE_RATING" msgstr "Betyg: " -#: default.py:48 +#: default.py:63 msgid "TEXT_SITE_REVIEW" msgstr "Omdöme:\n" -#: default.py:66 +#: default.py:81 msgid "TEXT_EXCEPTION" msgstr "Fel, någon behöver ta en titt på detta." -#: default.py:74 +#: default.py:89 msgid "TEXT_TESTING_START_HEADER" msgstr "### {0} ###" -#: default.py:78 -msgid "TEXT_TEST_GOOGLE_PAGESPEED" -msgstr "###############################\nKör test: 0 - Google Pagespeed" +#: default.py:127 +msgid "TEXT_TEST_VALID_ARGUMENTS" +msgstr "Giltiga argument att välja på -t/--test:" -#: default.py:81 -msgid "TEXT_TEST_PAGE_NOT_FOUND" -msgstr "###############################\nKör test: 2 - 404 (Page not Found)" +#: default.py:128 +msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE" +msgstr "-t 1\t: Prestanda (Google Lighthouse)" -#: default.py:84 -msgid "TEXT_TEST_HTML" -msgstr "###############################\nKör test: 6 - HTML" +#: default.py:129 +msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_SEO" +msgstr "-t 4\t: Sökmotoroptimering (Google Lighthouse)" -#: default.py:87 -msgid "TEXT_TEST_CSS" -msgstr "###############################\nKör test: 7 - CSS" +#: default.py:130 +msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_A11Y" +msgstr "-t 10\t: Tillgänglighet (Google Lighthouse)" -#: default.py:90 -msgid "TEXT_TEST_WEBBKOLL" -msgstr "###############################\nKör test: 20 - Webbkoll" +#: default.py:131 +msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_PWA" +msgstr "-t 8\t: Progressiv webbapp (Google Lighthouse)" -#: default.py:96 -msgid "TEXT_TEST_VALID_ARGUMENTS" -msgstr "Giltiga argument att välja på -t/--test:" +#: default.py:132 +msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_LIGHTHOUSE_BEST_PRACTICE" +msgstr "-t 5\t: God praxis (Google Lighthouse)" -#: default.py:97 -msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_PAGESPEED" -msgstr "-t 0\t: Google Pagespeed" +msgid "TEXT_TEST_VALID_ARGUMENTS_YELLOW_LAB_TOOLS" +msgstr "-t 17\t: Kvalitet på frontend (Yellow Lab Tools)" -#: default.py:98 +#: default.py:133 msgid "TEXT_TEST_VALID_ARGUMENTS_PAGE_NOT_FOUND" -msgstr 
"-t 2\t: 404 (Page not Found)" +msgstr "-t 2\t: 404 (sida finns inte)" -#: default.py:99 +#: default.py:134 msgid "TEXT_TEST_VALID_ARGUMENTS_HTML" -msgstr "-t 6\t: HTML" +msgstr "-t 6\t: HTML validering" -#: default.py:100 +#: default.py:135 msgid "TEXT_TEST_VALID_ARGUMENTS_CSS" -msgstr "-t 7\t: CSS" +msgstr "-t 7\t: CSS validering" -#: default.py:101 +#: default.py:136 msgid "TEXT_TEST_VALID_ARGUMENTS_WEBBKOLL" -msgstr "-t 20\t: Webbkoll" +msgstr "-t 20\t: Integritet & Säkerhet (Webbkoll)" + +#: default.py:137 +msgid "TEXT_TEST_VALID_ARGUMENTS_STANDARD_FILES" +msgstr "-t 9\t: Standardfiler" #: default.py:137 default.py:141 default.py:146 default.py:213 msgid "TEXT_COMMAND_USAGE" -msgstr "\n\tWebPerf Core\n\n\tAnvänd så här:\ndefault.py -u https://webperf.se\n\n\tVal och argument:\n\t-h/--help\t\t\t: Hjälp och hur du använder skriptet\n\t-u/--url \t\t: webbplatsens adress att testa\n\t-t/--test <1/2/6/7/20>\t\t: kör ett enda specifikt test mot angiven webbplats(er)\n\t-r/--review\t\t\t: visar omdömen direkt i terminalen\n\t-i/--input \t\t: sökväg för input-fil (.json/.sqlite)\n\t-o/--output \t\t: sökväg till output-fil (.json/.csv/.sql/.sqlite)\n\t-A/--addUrl \t\t: webbplatsens adress/url (ett krav när du använder -i/--input)\n\t-D/--deleteUrl \t: webbplats adress/url (ett krav när du använder -i/--input)\n\t-L/--language \t: språk som används för output(en = default/sv)" +msgstr "\n\tWebPerf Core\n\n\tAnvänd så här:\ndefault.py -u https://webperf.se\n\n\tVal och argument:\n\t-h/--help\t\t\t: Hjälp och hur du använder skriptet\n\t-u/--url \t\t: webbplatsens adress att testa\n\t-t/--test \t\t: kör ett specifikt test (ange ? för att lista tillgängliga tester)\n\t-r/--review\t\t\t: visar omdömen direkt i terminalen\n\t-i/--input \t\t: sökväg för input-fil (.json/.sqlite)\n\t-i/--input-skip \t: antal att hoppa över\n\t-i/--input-take \t: antal att testa\n\t-o/--output \t\t: sökväg till output-fil (.json/.csv/.sql/.sqlite)\n\t-A/--addUrl \t\t: webbplatsens adress/url (ett krav när du använder -i/--input)\n\t-D/--deleteUrl \t: webbplats adress/url (ett krav när du använder -i/--input)\n\t-L/--language \t: språk som används för output(en = default/sv)" diff --git a/locales/webperf-core.pot b/locales/webperf-core.pot deleted file mode 100644 index 035f6f56..00000000 --- a/locales/webperf-core.pot +++ /dev/null @@ -1,117 +0,0 @@ -# SOME DESCRIPTIVE TITLE. -# Copyright (C) YEAR ORGANIZATION -# FIRST AUTHOR , YEAR. 
-#
-msgid ""
-msgstr ""
-"Project-Id-Version: PACKAGE VERSION\n"
-"POT-Creation-Date: 2020-05-23 17:29+0200\n"
-"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
-"Last-Translator: FULL NAME \n"
-"Language-Team: LANGUAGE \n"
-"MIME-Version: 1.0\n"
-"Content-Type: text/plain; charset=UTF-8\n"
-"Content-Transfer-Encoding: 8bit\n"
-"Generated-By: pygettext.py 1.5\n"
-
-
-#: checks.py:47
-msgid "TEST_404_REVIEW_WRONG_STATUS_CODE"
-msgstr ""
-
-#: checks.py:67
-msgid "TEST_404_REVIEW_NO_TITLE"
-msgstr ""
-
-#: checks.py:77
-msgid "TEST_404_REVIEW_MAIN_HEADER"
-msgstr ""
-
-#: checks.py:135
-msgid "TEST_404_REVIEW_NO_SWEDISH_ERROR_MSG"
-msgstr ""
-
-#: checks.py:143
-msgid "TEST_404_REVIEW_ERROR_MSG_UNDER_150"
-msgstr ""
-
-#: checks.py:146
-msgid "TEST_REVIEW_NO_REMARKS"
-msgstr ""
-
-#: default.py:21
-msgid "TEXT_TEST_START_HEADER"
-msgstr ""
-
-#: default.py:25
-msgid "TEXT_TESTING_NUMBER_OF_SITES"
-msgstr ""
-
-#: default.py:30
-msgid "TEXT_TESTING_SITE"
-msgstr ""
-
-#: default.py:46
-msgid "TEXT_SITE_RATING"
-msgstr ""
-
-#: default.py:48
-msgid "TEXT_SITE_REVIEW"
-msgstr ""
-
-#: default.py:66
-msgid "TEXT_EXCEPTION"
-msgstr ""
-
-#: default.py:74
-msgid "TEXT_TESTING_START_HEADER"
-msgstr ""
-
-#: default.py:78
-msgid "TEXT_TEST_GOOGLE_PAGESPEED"
-msgstr ""
-
-#: default.py:81
-msgid "TEXT_TEST_PAGE_NOT_FOUND"
-msgstr ""
-
-#: default.py:84
-msgid "TEXT_TEST_HTML"
-msgstr ""
-
-#: default.py:87
-msgid "TEXT_TEST_CSS"
-msgstr ""
-
-#: default.py:90
-msgid "TEXT_TEST_WEBBKOLL"
-msgstr ""
-
-#: default.py:96
-msgid "TEXT_TEST_VALID_ARGUMENTS"
-msgstr ""
-
-#: default.py:97
-msgid "TEXT_TEST_VALID_ARGUMENTS_GOOGLE_PAGESPEED"
-msgstr ""
-
-#: default.py:98
-msgid "TEXT_TEST_VALID_ARGUMENTS_PAGE_NOT_FOUND"
-msgstr ""
-
-#: default.py:99
-msgid "TEXT_TEST_VALID_ARGUMENTS_HTML"
-msgstr ""
-
-#: default.py:100
-msgid "TEXT_TEST_VALID_ARGUMENTS_CSS"
-msgstr ""
-
-#: default.py:101
-msgid "TEXT_TEST_VALID_ARGUMENTS_WEBBKOLL"
-msgstr ""
-
-#: default.py:212 default.py:243
-msgid "TEXT_COMMAND_USAGE"
-msgstr ""
-
diff --git a/models.py b/models.py
index f6682f58..e16aceab 100644
--- a/models.py
+++ b/models.py
@@ -10,6 +10,22 @@ class Sites(object):
     website = ""
     active = 1
 
+    def __init__(self, id, website):
+        self.id = id
+        self.website = website
+
+    def todata(self):
+        result = {
+            'id': self.id,
+            'website': self.website
+        }
+        return result
+
+    @staticmethod
+    def fieldnames():
+        result = [ 'id', 'website']
+        return result
+
     def __repr__(self):
         return '<Site %r>' % self.title
 
diff --git a/tests/a11y_lighthouse.py b/tests/a11y_lighthouse.py
new file mode 100644
index 00000000..92634572
--- /dev/null
+++ b/tests/a11y_lighthouse.py
@@ -0,0 +1,100 @@
+# -*- coding: utf-8 -*-
+import sys
+import socket
+import ssl
+import json
+import requests
+import urllib  # https://docs.python.org/3/library/urllib.parse.html
+import uuid
+import re
+from bs4 import BeautifulSoup
+import config
+from tests.utils import *
+import gettext
+_ = gettext.gettext
+
+# DEFAULTS
+googlePageSpeedApiKey = config.googlePageSpeedApiKey
+
+
+def run_test(langCode, url, strategy='mobile', category='accessibility'):
+
+    language = gettext.translation(
+        'a11y_lighthouse', localedir='locales', languages=[langCode])
+    language.install()
+    _ = language.gettext
+
+    print(_('TEXT_RUNNING_TEST'))
+
+    check_url = url.strip()
+
+    pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&key={2}'.format(
+        category, check_url, googlePageSpeedApiKey)
+
+    get_content = ''
+
+    try:
+        get_content = httpRequestGetContent(pagespeed_api_request)
+    except:  # breaking and hoping for more luck with the next URL
+        print(
+            'Error! Unfortunately the request for URL "{0}" failed, message:\n{1}'.format(
+                check_url, sys.exc_info()[0]))
+        pass
+
+    json_content = ''
+
+    try:
+        json_content = json.loads(get_content)
+    except:  # might crash if checked resource is not a webpage
+        print('Error! JSON failed parsing for the URL "{0}"\nMessage:\n{1}'.format(
+            check_url, sys.exc_info()[0]))
+        pass
+
+    return_dict = {}
+
+    score = 0
+    fails = 0
+    fail_dict = {}
+
+    for item in json_content['lighthouseResult']['audits'].keys():
+        try:
+            return_dict[item] = json_content['lighthouseResult']['audits'][item]['score']
+
+            score = score + \
+                int(json_content['lighthouseResult']['audits'][item]['score'])
+
+            if int(json_content['lighthouseResult']['audits'][item]['score']) == 0:
+                fails += 1
+                fail_dict[item] = json_content['lighthouseResult']['audits'][item]['title']
+        except:
+            # has no 'numericValue'
+            #print(item, 'har inget värde')
+            pass
+
+    review = ''
+    points = 0
+
+    if fails == 0:
+        points = 5
+        review = _('TEXT_REVIEW_A11Y_VERY_GOOD')
+    elif fails <= 2:
+        points = 4
+        review = _('TEXT_REVIEW_A11Y_IS_GOOD')
+    elif fails <= 3:
+        points = 3
+        review = _('TEXT_REVIEW_A11Y_IS_OK')
+    elif fails <= 5:
+        points = 2
+        review = _('TEXT_REVIEW_A11Y_IS_BAD')
+    elif fails > 5:
+        points = 1
+        review = _('TEXT_REVIEW_A11Y_IS_VERY_BAD')
+
+    if fails != 0:
+        review += _('TEXT_REVIEW_A11Y_NUMBER_OF_PROBLEMS').format(fails)
+
+    for key, value in return_dict.items():
+        if value == 0:
+            review += '* {}\n'.format(fail_dict[key])
+
+    return (points, review, return_dict)
diff --git a/tests/best_practice_lighthouse.py b/tests/best_practice_lighthouse.py
new file mode 100644
index 00000000..44118082
--- /dev/null
+++ b/tests/best_practice_lighthouse.py
@@ -0,0 +1,102 @@
+# -*- coding: utf-8 -*-
+import sys
+import socket
+import ssl
+import json
+import requests
+import urllib  # https://docs.python.org/3/library/urllib.parse.html
+import uuid
+import re
+from bs4 import BeautifulSoup
+import config
+from tests.utils import *
+import gettext
+_ = gettext.gettext
+
+# DEFAULTS
+googlePageSpeedApiKey = config.googlePageSpeedApiKey
+
+
+def run_test(langCode, url, strategy='mobile', category='best-practices'):
+    language = gettext.translation(
+        'best_practice_lighthouse', localedir='locales', languages=[langCode])
+    language.install()
+    _ = language.gettext
+
+    print(_('TEXT_RUNNING_TEST'))
+
+    check_url = url.strip()
+
+    pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&strategy={2}&key={3}'.format(
+        category, check_url, strategy, googlePageSpeedApiKey)
+
+    get_content = ''
+
+    try:
+        get_content = httpRequestGetContent(pagespeed_api_request)
+    except:  # breaking and hoping for more luck with the next URL
+        print(
+            'Error! Unfortunately the request for URL "{0}" failed, message:\n{1}'.format(
+                check_url, sys.exc_info()[0]))
+        pass
+
+    json_content = ''
+
+    try:
+        json_content = json.loads(get_content)
+    except:  # might crash if checked resource is not a webpage
+        print('Error! 
JSON failed parsing for the URL "{0}"\nMessage:\n{1}'.format( + check_url, sys.exc_info()[0])) + pass + + return_dict = {} + + score = 0 + fails = 0 + fail_dict = {} + + for item in json_content['lighthouseResult']['audits'].keys(): + try: + return_dict[item] = json_content['lighthouseResult']['audits'][item]['score'] + + score = score + \ + int(json_content['lighthouseResult']['audits'][item]['score']) + + if int(json_content['lighthouseResult']['audits'][item]['score']) == 0: + fails += 1 + fail_dict[item] = json_content['lighthouseResult']['audits'][item]['title'] + except: + # has no 'numericValue' + #print(item, 'har inget värde') + pass + + review = '' + points = 0 + + if fails == 0: + points = 5 + review = _('TEXT_REVIEW_PRACTICE_VERY_GOOD') + elif fails <= 2: + points = 4 + review = _('TEXT_REVIEW_PRACTICE_IS_GOOD') + elif fails <= 3: + points = 3 + review = _('TEXT_REVIEW_PRACTICE_IS_OK') + elif fails <= 4: + points = 2 + review = _('TEXT_REVIEW_PRACTICE_IS_BAD') + elif fails > 4: + points = 1 + review = _('TEXT_REVIEW_PRACTICE_IS_VERY_BAD') + + review += _('TEXT_REVIEW_PRACTICE_NUMBER_OF_PROBLEMS').format(fails) + + if fails != 0: + review += _('TEXT_REVIEW_PRACTICE_PROBLEMS') + + for key, value in return_dict.items(): + if value == 0: + review += '* {}\n'.format(fail_dict[key]) + # print(key) + + return (points, review, return_dict) diff --git a/tests/w3c_validate_css.py b/tests/css_validator_w3c.py similarity index 67% rename from tests/w3c_validate_css.py rename to tests/css_validator_w3c.py index 355e2314..a2faf4a8 100644 --- a/tests/w3c_validate_css.py +++ b/tests/css_validator_w3c.py @@ -10,11 +10,14 @@ from bs4 import BeautifulSoup import config from tests.utils import * +import gettext +_ = gettext.gettext ### DEFAULTS request_timeout = config.http_request_timeout +useragent = config.useragent -def run_test(url): +def run_test(langCode, url): """ Only work on a domain-level. Returns tuple with decimal for grade and string with review """ @@ -22,10 +25,16 @@ def run_test(url): points = 0.0 review = '' + language = gettext.translation('css_validator_w3c', localedir='locales', languages=[langCode]) + language.install() + _ = language.gettext + + print(_('TEXT_RUNNING_TEST')) + ## kollar koden try: url = 'https://jigsaw.w3.org/css-validator/validator?uri={0}&profile=css3svg&usermedium=all&warning=1&vextwarning=&lang=en'.format(url.replace('/', '%2F').replace(':', '%3A')) - headers = {'user-agent': 'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)'} + headers = {'user-agent': useragent} request = requests.get(url, allow_redirects=False, headers=headers, timeout=request_timeout*2) ## hämta HTML @@ -38,18 +47,18 @@ def run_test(url): if errors == 0: points = 5.0 - review = '* Inga fel i CSS-koden.\n' + review = _('TEXT_REVIEW_CSS_VERY_GOOD') elif errors <= 5: points = 4.0 - review = '* Den testade sidan har {0} st fel i sin CSS-kod.\n'.format(errors) + review = _('TEXT_REVIEW_CSS_IS_GOOD').format(errors) elif errors <= 10: points = 3.0 - review = '* Den testade sidan har {0} st fel i sin CSS-kod.\n'.format(errors) + review = _('TEXT_REVIEW_CSS_IS_OK').format(errors) elif errors <= 20: points = 2.0 - review = '* Den testade sidan har {0} st fel i sin CSS-kod. Det är inte så bra.\n'.format(errors) + review = _('TEXT_REVIEW_CSS_IS_BAD').format(errors) elif errors > 20: points = 1.0 - review = '* Den testade sidan har massor med fel i sin CSS-kod. Hela {0} st. 
\n'.format(errors)
+        review = _('TEXT_REVIEW_CSS_IS_VERY_BAD').format(errors)
 
     return (points, review)
diff --git a/tests/frontend_quality_yellow_lab_tools.py b/tests/frontend_quality_yellow_lab_tools.py
new file mode 100644
index 00000000..824efb9d
--- /dev/null
+++ b/tests/frontend_quality_yellow_lab_tools.py
@@ -0,0 +1,83 @@
+#-*- coding: utf-8 -*-
+import sys
+import socket
+import ssl
+import json
+import requests
+import urllib  # https://docs.python.org/3/library/urllib.parse.html
+import uuid
+import re
+from bs4 import BeautifulSoup
+import config
+from tests.utils import *
+import gettext
+_ = gettext.gettext
+
+### DEFAULTS
+googlePageSpeedApiKey = config.googlePageSpeedApiKey
+
+def run_test(langCode, url, device='phone'):
+    """
+    Analyzes URL with Yellow Lab Tools docker image.
+    Devices might be; phone, tablet, desktop
+    """
+
+    language = gettext.translation('frontend_quality_yellow_lab_tools', localedir='locales', languages=[langCode])
+    language.install()
+    _ = language.gettext
+
+    print(_("TEXT_RUNNING_TEST"))
+
+    r = requests.post('https://yellowlab.tools/api/runs', data = {'url':url, "waitForResponse":'true', 'device': device})
+
+    result_url = r.url
+    test_id = result_url.rsplit('/', 1)[1]
+
+    result_json = httpRequestGetContent('https://yellowlab.tools/api/results/{0}?exclude=toolsResults'.format(test_id))
+    result_dict = json.loads(result_json)
+
+    return_dict = {}
+
+    for key in result_dict['scoreProfiles']['generic'].keys():
+        if key == 'globalScore':
+            return_dict[key] = result_dict['scoreProfiles']['generic'][key]
+
+    for key in result_dict['scoreProfiles']['generic']['categories'].keys():
+        return_dict[key] = result_dict['scoreProfiles']['generic']['categories'][key]['categoryScore']
+
+    review = ''
+    yellow_lab = return_dict["globalScore"]
+
+    rating = (int(yellow_lab) / 20) + 0.5
+
+    if rating > 5:
+        rating = 5
+    elif rating < 1:
+        rating = 1
+
+    if rating == 5:
+        review = _("TEXT_WEBSITE_IS_VERY_GOOD")
+    elif rating >= 4:
+        review = _("TEXT_WEBSITE_IS_GOOD")
+    elif rating >= 3:
+        review = _("TEXT_WEBSITE_IS_OK")
+    elif rating >= 2:
+        review = _("TEXT_WEBSITE_IS_BAD")
+    else:  # rating below 2, including values between 1 and 2
+        review = _("TEXT_WEBSITE_IS_VERY_BAD")
+
+    review += _("TEXT_OVERALL_GRADE").format(return_dict["globalScore"])
+    review += _("TEXT_TESTED_ON_DEVICETYPE").format(device)
+    review += _("TEXT_PAGE_WEIGHT").format(return_dict["pageWeight"])
+    review += _("TEXT_PAGE_REQUESTS").format(return_dict["requests"])
+    review += _("TEXT_PAGE_DOM_COMPLEXITY").format(return_dict["domComplexity"])
+    review += _("TEXT_PAGE_DOM_MANIPULATIONS").format(return_dict["domManipulations"])
+    review += _("TEXT_PAGE_SCROLL").format(return_dict["scroll"])
+    review += _("TEXT_PAGE_BAD_JS").format(return_dict["badJavascript"])
+    review += _("TEXT_PAGE_JQUERY").format(return_dict["jQuery"])
+    review += _("TEXT_PAGE_CSS_COMPLEXITY").format(return_dict["cssComplexity"])
+    review += _("TEXT_PAGE_BAD_CSS").format(return_dict["badCSS"])
+    review += _("TEXT_PAGE_FONTS").format(return_dict["fonts"])
+    review += _("TEXT_SERVER_CONFIG").format(return_dict["serverConfig"])
+
+    return (rating, review, return_dict)
\ No newline at end of file
diff --git a/tests/w3c_validate_html.py b/tests/html_validator_w3c.py
similarity index 66%
rename from tests/w3c_validate_html.py
rename to tests/html_validator_w3c.py
index db584364..c7c92877 100644
--- a/tests/w3c_validate_html.py
+++ b/tests/html_validator_w3c.py
@@ -10,11 +10,14 @@
 from bs4 import BeautifulSoup
 import config
 from tests.utils import *
+import gettext
+_ = 
gettext.gettext ### DEFAULTS request_timeout = config.http_request_timeout +useragent = config.useragent -def run_test(url): +def run_test(langCode, url): """ Only work on a domain-level. Returns tuple with decimal for grade and string with review """ @@ -22,10 +25,16 @@ def run_test(url): points = 0.0 review = '' + language = gettext.translation('html_validator_w3c', localedir='locales', languages=[langCode]) + language.install() + _ = language.gettext + + print(_('TEXT_RUNNING_TEST')) + ## kollar koden try: url = 'https://validator.w3.org/nu/?doc={0}'.format(url.replace('/', '%2F').replace(':', '%3A')) - headers = {'user-agent': 'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)'} + headers = {'user-agent': useragent} request = requests.get(url, allow_redirects=False, headers=headers, timeout=request_timeout) ## hämta HTML @@ -38,18 +47,18 @@ def run_test(url): if errors == 0: points = 5.0 - review = '* Inga fel i HTML-koden.\n' + review = _('TEXT_REVIEW_HTML_VERY_GOOD') elif errors <= 5: points = 4.0 - review = '* Den testade sidan har {0} st fel i sin HTML-kod.\n'.format(errors) + review = _('TEXT_REVIEW_HTML_IS_GOOD').format(errors) elif errors <= 15: points = 3.0 - review = '* Den testade sidan har {0} st fel i sin HTML-kod.\n'.format(errors) + review = _('TEXT_REVIEW_HTML_IS_OK').format(errors) elif errors <= 30: points = 2.0 - review = '* Den testade sidan har {0} st fel i sin HTML-kod. Det är inte så bra.\n'.format(errors) + review = _('TEXT_REVIEW_HTML_IS_BAD').format(errors) elif errors > 30: points = 1.0 - review = '* Den testade sidan har massor med fel i sin HTML-kod. Hela {0} st. \n'.format(errors) + review = _('TEXT_REVIEW_HTML_IS_VERY_BAD').format(errors) return (points, review) diff --git a/tests/page_not_found.py b/tests/page_not_found.py index a3d598ee..0c44811c 100644 --- a/tests/page_not_found.py +++ b/tests/page_not_found.py @@ -10,11 +10,14 @@ from bs4 import BeautifulSoup import config from tests.utils import * +import gettext +_ = gettext.gettext ### DEFAULTS request_timeout = config.http_request_timeout +useragent = config.useragent -def run_test(url): +def run_test(langCode, url): """ Only work on a domain-level. Returns tuple with decimal for grade and string with review """ @@ -23,16 +26,22 @@ def run_test(url): review = '' result_dict = {} + language = gettext.translation('page_not_found', localedir='locales', languages=[langCode]) + language.install() + _ = language.gettext + + print(_('TEXT_RUNNING_TEST')) + ## kollar koden o = urllib.parse.urlparse(url) url = '{0}://{1}/{3}/{2}'.format(o.scheme, o.netloc, 'finns-det-en-sida/pa-den-har-adressen/testanrop/', get_guid(5)) - headers = {'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:81.0) Gecko/20100101 Firefox/81.0'} + headers = {'user-agent': useragent} request = requests.get(url, allow_redirects=True, headers=headers, timeout=request_timeout) code = request.status_code if code == 404: points += 2.0 else: - review = review + _('TEST_404_REVIEW_WRONG_STATUS_CODE').format(request.status_code) #'* Fel statuskod. 
Fick {0} när 404 vore korrekt.\n'.format(request.status_code) + review = review + _('TEXT_REVIEW_WRONG_STATUS_CODE').format(request.status_code) result_dict['status_code'] = code @@ -52,7 +61,7 @@ def run_test(url): if title: result_dict['page_title'] = title.string else: - review = review + _('TEST_404_REVIEW_NO_TITLE') #'* hittade ingen titel på sidan\n' + review = review + _('TEXT_REVIEW_NO_TITLE') except: print('Error getting page title!\nMessage:\n{0}'.format(sys.exc_info()[0])) @@ -62,7 +71,7 @@ def run_test(url): if h1: result_dict['h1'] = h1.string else: - review = review + _('TEST_404_REVIEW_MAIN_HEADER') #'* hittade ingen huvud rubrik (h1)\n' + review = review + _('TEXT_REVIEW_MAIN_HEADER') except: print('Error getting H1!\nMessage:\n{0}'.format(sys.exc_info()[0])) @@ -121,17 +130,17 @@ def run_test(url): if found_match == False: - review = review + _('TEST_404_REVIEW_NO_SWEDISH_ERROR_MSG') #'* Verkar sakna text som beskriver att ett fel uppstått (på svenska).\n' + review = review + _('TEXT_REVIEW_NO_SWEDISH_ERROR_MSG') ## hur långt är inehållet soup = BeautifulSoup(request.text, 'html.parser') if len(soup.get_text()) > 150: points += 1.5 else: - review = review + _('TEST_404_REVIEW_ERROR_MSG_UNDER_150') #'* Information är under 150 tecken, vilket tyder på att användaren inte vägleds vidare.\n' + review = review + _('TEXT_REVIEW_ERROR_MSG_UNDER_150') #'* Information är under 150 tecken, vilket tyder på att användaren inte vägleds vidare.\n' if len(review) == 0: - review = _('TEST_REVIEW_NO_REMARKS') + review = _('TEXT_REVIEW_NO_REMARKS') if points == 0: points = 1.0 diff --git a/tests/lighthouse.py b/tests/performance_lighthouse.py similarity index 55% rename from tests/lighthouse.py rename to tests/performance_lighthouse.py index 72c8f93a..1833b917 100644 --- a/tests/lighthouse.py +++ b/tests/performance_lighthouse.py @@ -10,12 +10,13 @@ from bs4 import BeautifulSoup import config from tests.utils import * +import gettext +_ = gettext.gettext ### DEFAULTS -request_timeout = config.http_request_timeout googlePageSpeedApiKey = config.googlePageSpeedApiKey -def run_test(url, strategy='mobile', category='performance'): +def run_test(langCode, url, strategy='mobile', category='performance'): """ perf = https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category=performance&strategy=mobile&url=YOUR-SITE&key=YOUR-KEY a11y = https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category=accessibility&strategy=mobile&url=YOUR-SITE&key=YOUR-KEY @@ -23,6 +24,13 @@ def run_test(url, strategy='mobile', category='performance'): pwa = https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category=pwa&strategy=mobile&url=YOUR-SITE&key=YOUR-KEY seo = https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category=seo&strategy=mobile&url=YOUR-SITE&key=YOUR-KEY """ + + language = gettext.translation('performance_lighthouse', localedir='locales', languages=[langCode]) + language.install() + _ = language.gettext + + print(_('TEXT_RUNNING_TEST')) + check_url = url.strip() pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&strategy={2}&key={3}'.format(category, check_url, strategy, googlePageSpeedApiKey) @@ -62,26 +70,26 @@ def run_test(url, strategy='mobile', category='performance'): if speedindex <= 500: points = 5 - review = '* Webbplatsen laddar in mycket snabbt!\n' + review = _("TEXT_REVIEW_VERY_GOOD") elif speedindex <= 1200: points = 4 - review = '* Webbplatsen är snabb.\n' + review = _("TEXT_REVIEW_IS_GOOD") elif 
speedindex <= 2500: points = 3 - review = '* Genomsnittlig hastighet.\n' + review = _("TEXT_REVIEW_IS_OK") elif speedindex <= 3999: points = 2 - review = '* Webbplatsen är ganska långsam.\n' + review = _("TEXT_REVIEW_IS_BAD") elif speedindex > 3999: points = 1 - review = '* Webbplatsen är väldigt långsam!\n' + review = _("TEXT_REVIEW_IS_VERY_BAD") - review += '* Observerad hastighet: {} sekunder\n'.format(convert_to_seconds(return_dict["observedSpeedIndex"], False)) - review += '* Första meningsfulla visuella ändring: {} sek\n'.format(convert_to_seconds(return_dict["firstMeaningfulPaint"], False)) - review += '* Första meningsfulla visuella ändring på 3G: {} sek\n'.format(convert_to_seconds(return_dict["first-contentful-paint-3g"], False)) - review += '* CPU vilar efter: {} sek\n'.format(convert_to_seconds(return_dict["firstCPUIdle"], False)) - review += '* Webbplatsen är interaktiv: {} sek\n'.format(convert_to_seconds(return_dict["interactive"], False)) - review += '* Antal hänvisningar: {} st\n'.format(return_dict["redirects"]) - review += '* Sidans totala vikt: {} kb\n'.format(int(return_dict["total-byte-weight"]/1000)) + review += _("TEXT_REVIEW_OBSERVED_SPEED").format(convert_to_seconds(return_dict["observedSpeedIndex"], False))#'* Observerad hastighet: {} sekunder\n'.format(convert_to_seconds(return_dict["observedSpeedIndex"], False)) + review += _("TEXT_REVIEW_FIRST_MEANINGFUL_PAINT").format(convert_to_seconds(return_dict["firstMeaningfulPaint"], False))#'* Första meningsfulla visuella ändring: {} sek\n'.format(convert_to_seconds(return_dict["firstMeaningfulPaint"], False)) + review += _("TEXT_REVIEW_FIRST_MEANINGFUL_PAINT_3G").format(convert_to_seconds(return_dict["first-contentful-paint-3g"], False))#'* Första meningsfulla visuella ändring på 3G: {} sek\n'.format(convert_to_seconds(return_dict["first-contentful-paint-3g"], False)) + review += _("TEXT_REVIEW_CPU_IDLE").format(convert_to_seconds(return_dict["firstCPUIdle"], False))#'* CPU vilar efter: {} sek\n'.format(convert_to_seconds(return_dict["firstCPUIdle"], False)) + review += _("TEXT_REVIEW_INTERACTIVE").format(convert_to_seconds(return_dict["interactive"], False))#'* Webbplatsen är interaktiv: {} sek\n'.format(convert_to_seconds(return_dict["interactive"], False)) + review += _("TEXT_REVIEW_REDIRECTS").format(convert_to_seconds(return_dict["redirects"], False))#'* Antal hänvisningar: {} st\n'.format(return_dict["redirects"]) + review += _("TEXT_REVIEW_TOTAL_WEIGHT").format(int(return_dict["total-byte-weight"]/1000))#'* Sidans totala vikt: {} kb\n'.format(int(return_dict["total-byte-weight"]/1000)) return (points, review, return_dict) diff --git a/tests/privacy_webbkollen.py b/tests/privacy_webbkollen.py index cc592999..b8aac928 100644 --- a/tests/privacy_webbkollen.py +++ b/tests/privacy_webbkollen.py @@ -10,17 +10,34 @@ from bs4 import BeautifulSoup import config from tests.utils import * +import gettext +_ = gettext.gettext ### DEFAULTS request_timeout = config.http_request_timeout +useragent = config.useragent -def run_test(url): +def run_test(langCode, url): import time points = 0.0 errors = 0 review = '' - url = 'https://webbkoll.dataskydd.net/sv/check?url={0}'.format(url.replace('/', '%2F').replace(':', '%3A')) + language = gettext.translation('privacy_webbkollen', localedir='locales', languages=[langCode]) + language.install() + _ = language.gettext + + print(_('TEXT_RUNNING_TEST')) + + api_lang_code = 'en' + if langCode == 'sv': + api_lang_code = 'sv' + elif langCode == 'de': + api_lang_code = 'de' + elif langCode 
== 'no':
+        api_lang_code = 'no'
+
+    url = 'https://webbkoll.dataskydd.net/{1}/check?url={0}'.format(url.replace('/', '%2F').replace(':', '%3A'), api_lang_code)
 
     headers = {'user-agent': 'Mozilla/5.0 (compatible; Webperf; +https://webperf.se)'}
     request = requests.get(url, allow_redirects=False, headers=headers, timeout=request_timeout*2)
@@ -70,15 +87,15 @@
             mess += '* {0}'.format(re.sub(' +', ' ', line.text.strip()).replace('\n', ' ').replace(' ', '\n* ').replace('Kolla upp', '').replace(' ', ' '))
 
     if points == 5:
-        review = '* Webbplatsen är bra på integritet!\n'
+        review = _('TEXT_REVIEW_VERY_GOOD')
     elif points >= 4:
-        review = '* Webbplatsen kan bli bättre, men är helt ok.\n'
+        review = _('TEXT_REVIEW_IS_GOOD')
     elif points >= 3:
-        review = '* Ok integritet men borde bli bättre.\n'
+        review = _('TEXT_REVIEW_IS_OK')
    elif points >= 2:
-        review = '* Dålig integritet.\n'
+        review = _('TEXT_REVIEW_IS_BAD')
     else:
-        review = '* Väldigt dålig integritet!\n'
+        review = _('TEXT_REVIEW_IS_VERY_BAD')
         points = 1.0
 
     review += mess
diff --git a/tests/pwa_lighthouse.py b/tests/pwa_lighthouse.py
new file mode 100644
index 00000000..bfc37ea9
--- /dev/null
+++ b/tests/pwa_lighthouse.py
@@ -0,0 +1,102 @@
+# -*- coding: utf-8 -*-
+import sys
+import socket
+import ssl
+import json
+import requests
+import urllib  # https://docs.python.org/3/library/urllib.parse.html
+import uuid
+import re
+from bs4 import BeautifulSoup
+import config
+from tests.utils import *
+import gettext
+_ = gettext.gettext
+
+# DEFAULTS
+googlePageSpeedApiKey = config.googlePageSpeedApiKey
+
+
+def run_test(langCode, url, strategy='mobile', category='pwa'):
+    language = gettext.translation(
+        'pwa_lighthouse', localedir='locales', languages=[langCode])
+    language.install()
+    _ = language.gettext
+
+    print(_('TEXT_RUNNING_TEST'))
+
+    check_url = url.strip()
+
+    pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&key={2}'.format(
+        category, check_url, googlePageSpeedApiKey)
+
+    get_content = ''
+
+    try:
+        get_content = httpRequestGetContent(pagespeed_api_request)
+    except:  # breaking and hoping for more luck with the next URL
+        print(
+            'Error! Unfortunately the request for URL "{0}" failed, message:\n{1}'.format(
+                check_url, sys.exc_info()[0]))
+        pass
+
+    json_content = ''
+
+    try:
+        json_content = json.loads(get_content)
+    except:  # might crash if checked resource is not a webpage
+        print('Error! JSON failed parsing for the URL "{0}"\nMessage:\n{1}'.format(
+            check_url, sys.exc_info()[0]))
+        pass
+
+    return_dict = {}
+
+    score = 0
+    fails = 0
+    fail_dict = {}
+
+    for item in json_content['lighthouseResult']['audits'].keys():
+        try:
+            return_dict[item] = json_content['lighthouseResult']['audits'][item]['score']
+
+            score = score + \
+                int(json_content['lighthouseResult']['audits'][item]['score'])
+
+            if int(json_content['lighthouseResult']['audits'][item]['score']) == 0:
+                fails += 1
+                fail_dict[item] = json_content['lighthouseResult']['audits'][item]['title']
+        except:
+            # has no 'numericValue'
+            #print(item, 'har inget värde')
+            pass
+
+    review = ''
+    points = 0
+
+    if fails == 0:
+        points = 5
+        review = _('TEXT_REVIEW_PWA_VERY_GOOD')
+    elif fails <= 4:
+        points = 4
+        review = _('TEXT_REVIEW_PWA_IS_GOOD')
+    elif fails <= 7:
+        points = 3
+        review = _('TEXT_REVIEW_PWA_IS_OK')
+    elif fails <= 9:
+        points = 2
+        review = _('TEXT_REVIEW_PWA_IS_BAD')
+    elif fails > 9:
+        points = 1
+        review = _('TEXT_REVIEW_PWA_IS_VERY_BAD')
+
+    review += _('TEXT_REVIEW_PWA_NUMBER_OF_PROBLEMS').format(fails)
+
+    if fails != 0:
+        review += _('TEXT_REVIEW_PWA_PROBLEMS')
+
+    for key, value in return_dict.items():
+        if value == 0:
+            review += '* {}\n'.format(fail_dict[key])
+            # print(key)
+
+    return (points, review, return_dict)
diff --git a/tests/seo_lighthouse.py b/tests/seo_lighthouse.py
new file mode 100644
index 00000000..fbded5c8
--- /dev/null
+++ b/tests/seo_lighthouse.py
@@ -0,0 +1,104 @@
+# -*- coding: utf-8 -*-
+import sys
+import socket
+import ssl
+import json
+import requests
+import urllib  # https://docs.python.org/3/library/urllib.parse.html
+import uuid
+import re
+from bs4 import BeautifulSoup
+import config
+from tests.utils import *
+import gettext
+_ = gettext.gettext
+
+# DEFAULTS
+googlePageSpeedApiKey = config.googlePageSpeedApiKey
+
+
+def run_test(langCode, url, strategy='mobile', category='seo'):
+
+    language = gettext.translation(
+        'seo_lighthouse', localedir='locales', languages=[langCode])
+    language.install()
+    _ = language.gettext
+
+    print(_('TEXT_RUNNING_TEST'))
+
+    check_url = url.strip()
+
+    pagespeed_api_request = 'https://www.googleapis.com/pagespeedonline/v5/runPagespeed?category={0}&url={1}&key={2}'.format(
+        category, check_url, googlePageSpeedApiKey)
+
+    get_content = ''
+
+    try:
+        get_content = httpRequestGetContent(pagespeed_api_request)
+    except:  # breaking and hoping for more luck with the next URL
+        print(
+            'Error! Unfortunately the request for URL "{0}" failed, message:\n{1}'.format(
+                check_url, sys.exc_info()[0]))
+        pass
+
+    json_content = ''
+
+    try:
+        json_content = json.loads(get_content)
+    except:  # might crash if checked resource is not a webpage
+        print('Error! 
JSON failed parsing for the URL "{0}"\nMessage:\n{1}'.format( + check_url, sys.exc_info()[0])) + pass + + return_dict = {} + + score = 0 + fails = 0 + fail_dict = {} + + for item in json_content['lighthouseResult']['audits'].keys(): + try: + return_dict[item] = json_content['lighthouseResult']['audits'][item]['score'] + + score = score + \ + int(json_content['lighthouseResult'] + ['audits'][item]['score']) + + if int(json_content['lighthouseResult']['audits'][item]['score']) == 0: + fails += 1 + fail_dict[item] = json_content['lighthouseResult']['audits'][item]['title'] + except: + # has no 'numericValue' + # print(item, 'har inget värde') + pass + + review = '' + points = 0 + + if fails == 0: + points = 5 + review = _('TEXT_REVIEW_SEO_VERY_GOOD') + elif fails <= 2: + points = 4 + review = _('TEXT_REVIEW_SEO_IS_GOOD') + elif fails <= 3: + points = 3 + review = _('TEXT_REVIEW_SEO_IS_OK') + elif fails <= 4: + points = 2 + review = _('TEXT_REVIEW_SEO_IS_BAD') + elif fails > 4: + points = 1 + review = _('TEXT_REVIEW_SEO_IS_VERY_BAD') + + review += _('TEXT_REVIEW_SEO_NUMBER_OF_PROBLEMS').format(fails) + + if fails != 0: + review += _('TEXT_REVIEW_SEO_PROBLEMS') + + for key, value in return_dict.items(): + if value != None and value < 1: + review += '* {}\n'.format(fail_dict[key]) + # print(key) + + return (points, review, return_dict) diff --git a/tests/standard_files.py b/tests/standard_files.py new file mode 100644 index 00000000..dff601f8 --- /dev/null +++ b/tests/standard_files.py @@ -0,0 +1,239 @@ +# -*- coding: utf-8 -*- +import sys +import socket +import ssl +import json +import requests +import urllib # https://docs.python.org/3/library/urllib.parse.html +import uuid +import re +from bs4 import BeautifulSoup +import config +from tests.utils import * +import gettext +_ = gettext.gettext + +# DEFAULTS +request_timeout = config.http_request_timeout +useragent = config.useragent + + +def run_test(langCode, url): + """ + Looking for: + * robots.txt + * at least one sitemap/siteindex mentioned in robots.txt + * a RSS feed mentioned in the page's meta + """ + + language = gettext.translation( + 'standard_files', localedir='locales', languages=[langCode]) + language.install() + _ = language.gettext + + print(_('TEXT_RUNNING_TEST')) + + o = urllib.parse.urlparse(url) + parsed_url = '{0}://{1}/'.format(o.scheme, o.netloc) + + review = '' + return_dict = dict() + points = 5.0 + + # robots.txt (up to -3) + robots_result = validate_robots(_, parsed_url) + points -= robots_result[0] + review += robots_result[1] + return_dict.update(robots_result[2]) + robots_content = robots_result[3] + + # sitemap.xml (up to -3) + has_robots_txt = return_dict['robots.txt'] == 'ok' + sitemap_result = validate_sitemap(_, robots_content, has_robots_txt) + points -= sitemap_result[0] + review += sitemap_result[1] + return_dict.update(sitemap_result[2]) + + # rss feed (up to -0.5) + feed_result = validate_feed(_, url) + points -= feed_result[0] + review += feed_result[1] + return_dict.update(feed_result[2]) + + # security.txt (up to -1) + security_txt_result = validate_security_txt(_, parsed_url) + points -= security_txt_result[0] + review += security_txt_result[1] + return_dict.update(security_txt_result[2]) + + # minimum score is 1, make sure we have at least 1 + if points < 1: + points = 1 + + return (points, review, return_dict) + + +def validate_robots(_, parsed_url): + review = '' + return_dict = dict() + points = 0.0 + + robots_content = httpRequestGetContent(parsed_url + 'robots.txt') + + if robots_content == 
None or '<html' in robots_content.lower() or ('user-agent' not in robots_content.lower() and 'disallow' not in robots_content.lower() and 'allow' not in robots_content.lower()):
+        points = 3
+        review += _("TEXT_ROBOTS_MISSING")
+        return_dict['robots.txt'] = 'missing content'
+        robots_content = ''
+    else:
+        review += _("TEXT_ROBOTS_OK")
+        return_dict['robots.txt'] = 'ok'
+
+    return (points, review, return_dict, robots_content)
+
+
+def validate_sitemap(_, robots_content, has_robots_txt):
+    review = ''
+    return_dict = dict()
+    return_dict["num_sitemaps"] = 0
+    points = 0.0
+
+    if robots_content == None or not has_robots_txt or 'sitemap:' not in robots_content.lower():
+        points += 2
+        review += _("TEXT_SITEMAP_MISSING")
+        return_dict['sitemap'] = 'not in robots.txt'
+    else:
+        review += _("TEXT_SITEMAP_FOUND")
+        return_dict['sitemap'] = 'ok'
+
+        smap_pos = robots_content.lower().find('sitemap')
+        smaps = robots_content[smap_pos:].split('\n')
+        found_smaps = []
+        for line in smaps:
+            if 'sitemap:' in line.lower():
+                found_smaps.append(
+                    line.lower().replace('sitemap:', '').strip())
+
+        return_dict["num_sitemaps"] = len(found_smaps)
+
+        if len(found_smaps) > 0:
+            return_dict["sitemaps"] = found_smaps
+
+            smap_content = httpRequestGetContent(found_smaps[0])
+
+            if not is_sitemap(smap_content):
+                points += 1
+                review += _("TEXT_SITEMAP_BROKEN")
+                return_dict['sitemap_check'] = '\'{0}\' seem to be broken'.format(
+                    found_smaps[0])
+            else:
+                review += _("TEXT_SITEMAP_OK")
+                return_dict['sitemap_check'] = '\'{0}\' seem ok'.format(
+                    found_smaps[0])
+
+    return (points, review, return_dict)
+
+
+def validate_feed(_, url):
+    # TODO: validate first feed
+
+    review = ''
+    return_dict = dict()
+    points = 0.0
+    feed = list()
+
+    headers = {'user-agent': config.useragent}
+    try:
+        request = requests.get(url, allow_redirects=True,
+                               headers=headers, timeout=request_timeout)
+        soup = BeautifulSoup(request.text, 'lxml')
+        # feed = soup.find_all(rel='alternate')
+        feed = soup.find_all("link", {"type": "application/rss+xml"})
+
+    except:
+        #print('Exception looking for feed, probably connection problems')
+        pass
+
+    if len(feed) == 0:
+        points = 0.5
+        review += _("TEXT_RSS_FEED_MISSING")
+        return_dict['feed'] = 'not in meta'
+        return_dict['num_feeds'] = len(feed)
+    elif len(feed) > 0:
+        review += _("TEXT_RSS_FEED_FOUND")
+        return_dict['feed'] = 'found in meta'
+        return_dict['num_feeds'] = len(feed)
+        tmp_feed = []
+        for single_feed in feed:
+            tmp_feed.append(single_feed.get('href'))
+
+        return_dict['feeds'] = tmp_feed
+
+    return (points, review, return_dict)
+
+
+def validate_security_txt(_, parsed_url):
+    review = ''
+    return_dict = dict()
+    points = 0.0
+
+    security_wellknown_request = False
+    security_root_request = False
+
+    headers = {
+        'user-agent': useragent}
+    # normal location for security.txt
+    security_wellknown_url = parsed_url + '.well-known/security.txt'
+    try:
+        security_wellknown_request = requests.get(security_wellknown_url, allow_redirects=True,
+                                                  headers=headers, timeout=request_timeout)
+    except:
+        #print('Exception looking for security.txt, probably connection problems')
+        pass
+
+    security_wellknown_content = httpRequestGetContent(
+        security_wellknown_url)
+
+    # security.txt can also be placed in root if for example technical reasons prohibit use of /.well-known/
+    security_root_url = parsed_url + 'security.txt'
+    try:
+        security_root_request = requests.get(security_root_url, allow_redirects=True,
+                                             headers=headers, timeout=request_timeout)
+    except:
+        #print('Exception looking for security.txt, probably connection problems')
+        pass
+    security_root_content = httpRequestGetContent(security_root_url)
+
+    #print('security_wellknown_content:' + security_wellknown_content)
+    #print('security_root_content:' + security_root_content)
+
+    if not security_wellknown_request and not security_root_request:
+        # Can't find security.txt (not giving us 200 as status code)
+        points = 1.0
+        review += _("TEXT_SECURITY_MISSING")
+        return_dict['security.txt'] = 'missing'
+    elif (security_wellknown_content == None or ('<html' in security_wellknown_content.lower()) or (security_root_content == None or ('<html' in security_root_content.lower()))):
+        # Html (404 page?) content instead of expected content
+        points = 1.0
+        review += _("TEXT_SECURITY_WRONG_CONTENT")
+        return_dict['security.txt'] = 'wrong content'
+    elif (('Contact:' in security_wellknown_content and 'Expires:' in security_wellknown_content.lower()) or (('Contact:' in security_root_content and 'Expires:' in security_root_content.lower()))):
+        # Everything seems ok
+        review += _("TEXT_SECURITY_OK_CONTENT")
+        return_dict['security.txt'] = 'ok'
+    elif (not ('Contact:' in security_wellknown_content) and (not ('Contact:' in security_root_content))):
+        # Missing required Contact
+        points = 0.5
+        review += _("TEXT_SECURITY_REQUIRED_CONTACT_MISSING")
+        return_dict['security.txt'] = 'required contact missing'
+    elif (not ('Expires:' in security_wellknown_content) or (not ('Expires:' in security_root_content))):
+        # Missing required Expires (added in version 10 of draft)
+        points = 0.25
+        review += _("TEXT_SECURITY_REQUIRED_EXPIRES_MISSING")
+        return_dict['security.txt'] = 'required expires missing'
+    # print('* security.txt required content is missing')
+
+    # print(security_wellknown_content)
+    # print('* security.txt seems ok')
+
+    return (points, review, return_dict)
diff --git a/tests/utils.py b/tests/utils.py
index eba08ee0..c6427d92 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -1,53 +1,77 @@
-#-*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
 import sys
 import socket
 import ssl
 import json
 import requests
-import urllib # https://docs.python.org/3/library/urllib.parse.html
+import urllib  # https://docs.python.org/3/library/urllib.parse.html
 import uuid
 import re
 from bs4 import BeautifulSoup
 import config
 
-### DEFAULTS
+# DEFAULTS
 request_timeout = config.http_request_timeout
+useragent = config.useragent
 googlePageSpeedApiKey = config.googlePageSpeedApiKey
 
+
 def httpRequestGetContent(url):
     """Trying to fetch the response content
     Attributes: url, as for the URL to fetch
     """
     try:
-        a = requests.get(url)
+        headers = {'user-agent': useragent}
+        a = requests.get(url, allow_redirects=False,
+                         headers=headers, timeout=request_timeout*2)
+        #a = requests.get(url, timeout=request_timeout)
 
         return a.text
     except requests.exceptions.SSLError:
         if 'http://' in url:  # trying the same URL over SSL/TLS
             print('Info: Trying SSL before giving up.')
             return httpRequestGetContent(url.replace('http://', 'https://'))
     except requests.exceptions.ConnectionError:
+        if 'http://' in url:  # trying the same URL over SSL/TLS
+            print('Connection error! Info: Trying SSL before giving up.')
+            return httpRequestGetContent(url.replace('http://', 'https://'))
         print(
             'Connection error! Unfortunately the request for URL "{0}" failed.\nMessage:\n{1}'.format(url, sys.exc_info()[0]))
         pass
     except:
         print(
-            'Error! Unfortunately the request for URL "{0}" either timed out or failed for other reason(s). The timeout is set to {1} seconds.\nMessage:\n{2}'.format(url, timeout_in_seconds, sys.exc_info()[0]))
+            'Error! Unfortunately the request for URL "{0}" either timed out or failed for other reason(s). The timeout is set to {1} seconds.\nMessage:\n{2}'.format(url, request_timeout, sys.exc_info()[0]))
         pass
 
+
 def get_guid(length):
     """
     Generates a unique string in specified length
     """
     return str(uuid.uuid4())[0:length]
 
+
 def convert_to_seconds(millis, return_with_seconds=True):
     """
     Converts milliseconds to seconds.
     Arg: 'return_with_seconds' defaults to True and returns string ' sekunder' after the seconds
     """
     if return_with_seconds:
-        return (millis/1000)%60 + " sekunder"
+        return str((millis/1000) % 60) + " sekunder"
     else:
-        return (millis/1000)%60
+        return (millis/1000) % 60
+
+
+def is_sitemap(content):
+    """Check a string to see if its content is a sitemap or siteindex.
+
+    Attributes: content (string)
+    """
+    try:
+        if 'www.sitemaps.org/schemas/sitemap/' in content or '